@@ -438,25 +438,29 @@ def test_dtypes(self, spark):
         for col_name, col_type in dtypes:
             assert isinstance(col_name, str)
             assert isinstance(col_type, str)
-
+
         col_names = [name for name, _ in dtypes]
         assert col_names == ["name", "age", "salary"]
         for _, col_type in dtypes:
-            assert len(col_type) > 0
+            assert len(col_type) > 0

     def test_dtypes_complex_types(self, spark):
         from spark_namespace.sql.types import ArrayType, IntegerType, StringType, StructField, StructType
-        schema = StructType([
-            StructField("name", StringType(), True),
-            StructField("scores", ArrayType(IntegerType()), True),
-            StructField("address", StructType([
-                StructField("city", StringType(), True),
-                StructField("zip", StringType(), True)
-            ]), True)
-        ])
+
+        schema = StructType(
+            [
+                StructField("name", StringType(), True),
+                StructField("scores", ArrayType(IntegerType()), True),
+                StructField(
+                    "address",
+                    StructType([StructField("city", StringType(), True), StructField("zip", StringType(), True)]),
+                    True,
+                ),
+            ]
+        )
         data = [
             ("Alice", [90, 85, 88], {"city": "NYC", "zip": "10001"}),
-            ("Bob", [75, 80, 82], {"city": "LA", "zip": "90001"})
+            ("Bob", [75, 80, 82], {"city": "LA", "zip": "90001"}),
         ]
         df = spark.createDataFrame(data, schema)
         dtypes = df.dtypes
@@ -481,17 +485,21 @@ def test_printSchema(self, spark, capsys):

     def test_printSchema_nested(self, spark, capsys):
         from spark_namespace.sql.types import ArrayType, IntegerType, StringType, StructField, StructType
-        schema = StructType([
-            StructField("id", IntegerType(), True),
-            StructField("person", StructType([
-                StructField("name", StringType(), True),
-                StructField("age", IntegerType(), True)
-            ]), True),
-            StructField("hobbies", ArrayType(StringType()), True)
-        ])
+
+        schema = StructType(
+            [
+                StructField("id", IntegerType(), True),
+                StructField(
+                    "person",
+                    StructType([StructField("name", StringType(), True), StructField("age", IntegerType(), True)]),
+                    True,
+                ),
+                StructField("hobbies", ArrayType(StringType()), True),
+            ]
+        )
         data = [
             (1, {"name": "Alice", "age": 25}, ["reading", "coding"]),
-            (2, {"name": "Bob", "age": 30}, ["gaming", "music"])
+            (2, {"name": "Bob", "age": 30}, ["gaming", "music"]),
         ]
         df = spark.createDataFrame(data, schema)
         df.printSchema()
@@ -524,13 +532,16 @@ def test_treeString_basic(self, spark):
     def test_treeString_nested_struct(self, spark):
         from spark_namespace.sql.types import IntegerType, StringType, StructField, StructType

-        schema = StructType([
-            StructField("id", IntegerType(), True),
-            StructField("person", StructType([
-                StructField("name", StringType(), True),
-                StructField("age", IntegerType(), True)
-            ]), True)
-        ])
+        schema = StructType(
+            [
+                StructField("id", IntegerType(), True),
+                StructField(
+                    "person",
+                    StructType([StructField("name", StringType(), True), StructField("age", IntegerType(), True)]),
+                    True,
+                ),
+            ]
+        )
         data = [(1, {"name": "Alice", "age": 25})]
         df = spark.createDataFrame(data, schema)
         tree = df.schema.treeString()
@@ -544,15 +555,21 @@ def test_treeString_nested_struct(self, spark):
     def test_treeString_with_level(self, spark):
         from spark_namespace.sql.types import IntegerType, StringType, StructField, StructType

-        schema = StructType([
-            StructField("id", IntegerType(), True),
-            StructField("person", StructType([
-                StructField("name", StringType(), True),
-                StructField("details", StructType([
-                    StructField("address", StringType(), True)
-                ]), True)
-            ]), True)
-        ])
+        schema = StructType(
+            [
+                StructField("id", IntegerType(), True),
+                StructField(
+                    "person",
+                    StructType(
+                        [
+                            StructField("name", StringType(), True),
+                            StructField("details", StructType([StructField("address", StringType(), True)]), True),
+                        ]
+                    ),
+                    True,
+                ),
+            ]
+        )

         data = [(1, {"name": "Alice", "details": {"address": "123 Main St"}})]
         df = spark.createDataFrame(data, schema)
@@ -562,16 +579,15 @@ def test_treeString_with_level(self, spark):
         assert " |-- id:" in tree_level_1
         assert " |-- person: struct" in tree_level_1
         # Should not show nested field names at level 1
-        lines = tree_level_1.split('\n')
-        assert len([l for l in lines if l.strip()]) <= 3
+        lines = tree_level_1.split("\n")
+        assert len([line for line in lines if line.strip()]) <= 3

     def test_treeString_array_type(self, spark):
         from spark_namespace.sql.types import ArrayType, StringType, StructField, StructType

-        schema = StructType([
-            StructField("name", StringType(), True),
-            StructField("hobbies", ArrayType(StringType()), True)
-        ])
+        schema = StructType(
+            [StructField("name", StringType(), True), StructField("hobbies", ArrayType(StringType()), True)]
+        )

         data = [("Alice", ["reading", "coding"])]
         df = spark.createDataFrame(data, schema)