@@ -64,8 +64,7 @@ def read_parquet(
6464 """
6565 if table_partition_cols is None :
6666 table_partition_cols = []
67- return DataFrame (
68- SessionContext .global_ctx ().read_parquet (
67+ return SessionContext .global_ctx ().read_parquet (
6968 str (path ),
7069 table_partition_cols ,
7170 parquet_pruning ,
@@ -74,7 +73,6 @@ def read_parquet(
7473 schema ,
7574 file_sort_order ,
7675 )
77- )
7876
7977
8078def read_json (
@@ -106,16 +104,14 @@ def read_json(
106104 """
107105 if table_partition_cols is None :
108106 table_partition_cols = []
109- return DataFrame (
110- SessionContext .global_ctx ().read_json (
107+ return SessionContext .global_ctx ().read_json (
111108 str (path ),
112109 schema ,
113110 schema_infer_max_records ,
114111 file_extension ,
115112 table_partition_cols ,
116113 file_compression_type ,
117114 )
118- )
119115
120116
121117def read_csv (
@@ -157,8 +153,7 @@ def read_csv(
157153
158154 path = [str (p ) for p in path ] if isinstance (path , list ) else str (path )
159155
160- return DataFrame (
161- SessionContext .global_ctx ().read_csv (
156+ return SessionContext .global_ctx ().read_csv (
162157 path ,
163158 schema ,
164159 has_header ,
@@ -168,7 +163,6 @@ def read_csv(
168163 table_partition_cols ,
169164 file_compression_type ,
170165 )
171- )
172166
173167
174168def read_avro (
@@ -194,8 +188,6 @@ def read_avro(
194188 """
195189 if file_partition_cols is None :
196190 file_partition_cols = []
197- return DataFrame (
198- SessionContext .global_ctx ().read_avro (
191+ return SessionContext .global_ctx ().read_avro (
199192 str (path ), schema , file_partition_cols , file_extension
200193 )
201- )