30
30
Union ,
31
31
)
32
32
33
+ import bigframes_vendored .constants as constants
33
34
import bigframes_vendored .pandas .io .gbq as vendored_pandas_gbq
34
35
from google .cloud import bigquery
35
36
import numpy
@@ -105,6 +106,7 @@ def read_csv(
105
106
Literal ["c" , "python" , "pyarrow" , "python-fwf" , "bigquery" ]
106
107
] = None ,
107
108
encoding : Optional [str ] = None ,
109
+ write_engine : constants .WriteEngineType = "default" ,
108
110
** kwargs ,
109
111
) -> bigframes .dataframe .DataFrame :
110
112
return global_session .with_default_session (
@@ -118,6 +120,7 @@ def read_csv(
118
120
dtype = dtype ,
119
121
engine = engine ,
120
122
encoding = encoding ,
123
+ write_engine = write_engine ,
121
124
** kwargs ,
122
125
)
123
126
@@ -135,6 +138,7 @@ def read_json(
135
138
encoding : Optional [str ] = None ,
136
139
lines : bool = False ,
137
140
engine : Literal ["ujson" , "pyarrow" , "bigquery" ] = "ujson" ,
141
+ write_engine : constants .WriteEngineType = "default" ,
138
142
** kwargs ,
139
143
) -> bigframes .dataframe .DataFrame :
140
144
return global_session .with_default_session (
@@ -145,6 +149,7 @@ def read_json(
145
149
encoding = encoding ,
146
150
lines = lines ,
147
151
engine = engine ,
152
+ write_engine = write_engine ,
148
153
** kwargs ,
149
154
)
150
155
@@ -245,24 +250,41 @@ def read_gbq_table(
245
250
246
251
247
252
@typing.overload
def read_pandas(
    pandas_dataframe: pandas.DataFrame,
    *,
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.dataframe.DataFrame:
    ...


@typing.overload
def read_pandas(
    pandas_dataframe: pandas.Series,
    *,
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.series.Series:
    ...


@typing.overload
def read_pandas(
    pandas_dataframe: pandas.Index,
    *,
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.core.indexes.Index:
    ...


def read_pandas(
    pandas_dataframe: Union[pandas.DataFrame, pandas.Series, pandas.Index],
    *,
    write_engine: constants.WriteEngineType = "default",
):
    # Thin module-level wrapper: convert a local pandas object into the
    # corresponding BigQuery DataFrames object (DataFrame -> DataFrame,
    # Series -> Series, Index -> Index; see the @typing.overload stubs above)
    # by delegating to Session.read_pandas on the default global session.
    #
    # write_engine is keyword-only and forwarded verbatim; "default" lets the
    # session pick how the local data is uploaded to BigQuery.
    # NOTE(review): the full docstring presumably comes from
    # Session.read_pandas via a `read_pandas.__doc__ = ...` assignment
    # elsewhere in this file — confirm, matching the read_pickle pattern below.
    return global_session.with_default_session(
        bigframes.session.Session.read_pandas,
        pandas_dataframe,
        write_engine=write_engine,
    )
267
289
268
290
def read_pickle(
    filepath_or_buffer: FilePath | ReadPickleBuffer,
    compression: CompressionOptions = "infer",
    storage_options: StorageOptions = None,
    *,
    write_engine: constants.WriteEngineType = "default",
):
    # Thin module-level wrapper over Session.read_pickle on the default
    # global session. All arguments are forwarded by keyword so the session
    # method's defaults and validation apply unchanged.
    #
    # write_engine is keyword-only (new parameter, backward-compatible):
    # "default" lets the session choose the upload mechanism for the
    # unpickled data.
    return global_session.with_default_session(
        bigframes.session.Session.read_pickle,
        filepath_or_buffer=filepath_or_buffer,
        compression=compression,
        storage_options=storage_options,
        write_engine=write_engine,
    )


# Reuse the session method's docstring so the module-level wrapper and the
# Session API document identical behavior.
read_pickle.__doc__ = inspect.getdoc(bigframes.session.Session.read_pickle)
286
311
287
312
288
313
def read_parquet(
    path: str | IO["bytes"],
    *,
    engine: str = "auto",
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.dataframe.DataFrame:
    # Thin module-level wrapper over Session.read_parquet on the default
    # global session. `path` is positional; `engine` and `write_engine` are
    # keyword-only and forwarded verbatim.
    #
    # write_engine="default" lets the session choose how the parquet data is
    # loaded into BigQuery.
    return global_session.with_default_session(
        bigframes.session.Session.read_parquet,
        path,
        engine=engine,
        write_engine=write_engine,
    )
296
325
297
326
0 commit comments