@@ -35,28 +35,28 @@ def test_data_frame_type_enum():
3535 """
3636 Test that the DataFrameType enum has the correct values.
3737 """
38- assert (
39- DataFrameType .PANDAS .value == "pandas "
40- ), f"Expected 'pandas' but got: { DataFrameType . PANDAS . value } "
41- assert (
42- DataFrameType .PYSPARK .value == "pyspark "
43- ), f"Expected 'pyspark' but got: { DataFrameType . PYSPARK . value } "
38+ assert DataFrameType . PANDAS . value == "pandas" , (
39+ f"Expected 'pandas' but got: { DataFrameType .PANDAS .value } "
40+ )
41+ assert DataFrameType . PYSPARK . value == "pyspark" , (
42+ f"Expected 'pyspark' but got: { DataFrameType .PYSPARK .value } "
43+ )
4444
4545 # Test string comparison (now works directly!)
4646 assert DataFrameType .PANDAS == "pandas" , "Expected DataFrameType.PANDAS == 'pandas' to be True"
47- assert (
48- DataFrameType .PYSPARK == " pyspark"
49- ), "Expected DataFrameType.PYSPARK == 'pyspark' to be True"
47+ assert DataFrameType . PYSPARK == "pyspark" , (
48+ "Expected DataFrameType.PYSPARK == ' pyspark' to be True "
49+ )
5050
5151
5252def test_get_expectation_name ():
5353 """
5454 Test that the expectation name is the class name.
5555 """
5656 expectation = MyTestExpectation ()
57- assert (
58- expectation .get_expectation_name () == "MyTestExpectation "
59- ), f"Expected 'MyTestExpectation' but got: { expectation . get_expectation_name () } "
57+ assert expectation . get_expectation_name () == "MyTestExpectation" , (
58+ f"Expected 'MyTestExpectation' but got: { expectation .get_expectation_name ()} "
59+ )
6060
6161
6262def test_validate_unsupported_dataframe_type ():
@@ -150,55 +150,55 @@ def test_infer_data_frame_type(spark):
     # Test pandas DataFrame
     pandas_df = pd.DataFrame({"col1": [1, 2, 3], "col2": ["a", "b", "c"]})
     data_frame_type = expectation.infer_data_frame_type(pandas_df)
-    assert (
-        data_frame_type == DataFrameType.PANDAS
-    ), f"Expected PANDAS type but got: {data_frame_type}"
+    assert data_frame_type == DataFrameType.PANDAS, (
+        f"Expected PANDAS type but got: {data_frame_type}"
+    )

     # Test PySpark DataFrame
     spark_df = spark.createDataFrame([(1, "a"), (2, "b"), (3, "c")], ["col1", "col2"])
     data_frame_type = expectation.infer_data_frame_type(spark_df)
-    assert (
-        data_frame_type == DataFrameType.PYSPARK
-    ), f"Expected PYSPARK type but got: {data_frame_type}"
+    assert data_frame_type == DataFrameType.PYSPARK, (
+        f"Expected PYSPARK type but got: {data_frame_type}"
+    )

     # Test empty pandas DataFrame
     empty_pandas_df = pd.DataFrame(columns=["col1", "col2"])
     data_frame_type = expectation.infer_data_frame_type(empty_pandas_df)
-    assert (
-        data_frame_type == DataFrameType.PANDAS
-    ), f"Expected PANDAS type for empty DataFrame but got: {data_frame_type}"
+    assert data_frame_type == DataFrameType.PANDAS, (
+        f"Expected PANDAS type for empty DataFrame but got: {data_frame_type}"
+    )

     # Test empty PySpark DataFrame
     empty_spark_df = spark.createDataFrame([], "col1 INT, col2 STRING")
     data_frame_type = expectation.infer_data_frame_type(empty_spark_df)
-    assert (
-        data_frame_type == DataFrameType.PYSPARK
-    ), f"Expected PYSPARK type for empty DataFrame but got: {data_frame_type}"
+    assert data_frame_type == DataFrameType.PYSPARK, (
+        f"Expected PYSPARK type for empty DataFrame but got: {data_frame_type}"
+    )

     # Test unsupported DataFrame types
     with pytest.raises(ValueError) as context:
         expectation.infer_data_frame_type(None)
-    assert "Unsupported DataFrame type" in str(
-        context.value
-    ), f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    assert "Unsupported DataFrame type" in str(context.value), (
+        f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    )

     with pytest.raises(ValueError) as context:
         expectation.infer_data_frame_type("not_a_dataframe")
-    assert "Unsupported DataFrame type" in str(
-        context.value
-    ), f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    assert "Unsupported DataFrame type" in str(context.value), (
+        f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    )

     with pytest.raises(ValueError) as context:
         expectation.infer_data_frame_type([1, 2, 3])
-    assert "Unsupported DataFrame type" in str(
-        context.value
-    ), f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    assert "Unsupported DataFrame type" in str(context.value), (
+        f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    )

     with pytest.raises(ValueError) as context:
         expectation.infer_data_frame_type({"col1": [1, 2, 3]})
-    assert "Unsupported DataFrame type" in str(
-        context.value
-    ), f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    assert "Unsupported DataFrame type" in str(context.value), (
+        f"Expected 'Unsupported DataFrame type' in error message but got: {str(context.value)}"
+    )

     # Test with objects that might have similar attributes but aren't DataFrames
     class FakeDataFrame:
@@ -237,9 +237,9 @@ def test_infer_data_frame_type_with_connect_dataframe_available():

     # Test that Connect DataFrame is identified as PYSPARK type
     data_frame_type = expectation.infer_data_frame_type(mock_connect_df)
-    assert (
-        data_frame_type == DataFrameType.PYSPARK
-    ), f"Expected PYSPARK type for Connect DataFrame but got: {data_frame_type}"
+    assert data_frame_type == DataFrameType.PYSPARK, (
+        f"Expected PYSPARK type for Connect DataFrame but got: {data_frame_type}"
+    )


 @patch("dataframe_expectations.expectations.PySparkConnectDataFrame", None)
@@ -252,15 +252,15 @@ def test_infer_data_frame_type_without_connect_support(spark):
     # Test that regular DataFrames still work when Connect is not available
     pandas_df = pd.DataFrame({"col1": [1, 2, 3]})
     data_frame_type = expectation.infer_data_frame_type(pandas_df)
-    assert (
-        data_frame_type == DataFrameType.PANDAS
-    ), f"Expected PANDAS type but got: {data_frame_type}"
+    assert data_frame_type == DataFrameType.PANDAS, (
+        f"Expected PANDAS type but got: {data_frame_type}"
+    )

     spark_df = spark.createDataFrame([(1,), (2,), (3,)], ["col1"])
     data_frame_type = expectation.infer_data_frame_type(spark_df)
-    assert (
-        data_frame_type == DataFrameType.PYSPARK
-    ), f"Expected PYSPARK type but got: {data_frame_type}"
+    assert data_frame_type == DataFrameType.PYSPARK, (
+        f"Expected PYSPARK type but got: {data_frame_type}"
+    )


 def test_infer_data_frame_type_connect_import_behavior(spark):
@@ -297,6 +297,6 @@ def test_infer_data_frame_type_connect_import_behavior(spark):
     # Mock Connect DataFrame should be identified as PYSPARK
     mock_connect_df = MockConnectDataFrame()
     result_type = expectation.infer_data_frame_type(mock_connect_df)
-    assert (
-        result_type == DataFrameType.PYSPARK
-    ), f"Expected PYSPARK type for Connect DataFrame but got: {result_type}"
+    assert result_type == DataFrameType.PYSPARK, (
+        f"Expected PYSPARK type for Connect DataFrame but got: {result_type}"
+    )