@@ -107,7 +107,7 @@ def _read_text(
             paths=paths,
             boto3_session=session,
             chunksize=chunksize,
-            pandas_args=pandas_kwargs,
+            pandas_kwargs=pandas_kwargs,
             s3_additional_kwargs=s3_additional_kwargs,
             dataset=dataset,
             path_root=path_root,
@@ -120,7 +120,7 @@ def _read_text(
                 parser_func=parser_func,
                 path=p,
                 boto3_session=session,
-                pandas_args=pandas_kwargs,
+                pandas_kwargs=pandas_kwargs,
                 s3_additional_kwargs=s3_additional_kwargs,
                 dataset=dataset,
                 path_root=path_root,
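
The two hunks above only rename the keyword under which the collected pandas options are handed to the internal readers. A minimal, self-contained sketch (not the library's actual call sites) of what that forwarding amounts to:

```python
from typing import Any, Callable
import io
import pandas as pd

def read_text_sketch(parser_func: Callable[..., pd.DataFrame], buf: io.StringIO, **pandas_kwargs: Any) -> pd.DataFrame:
    # Whatever the caller passes (sep, names, dtype, ...) travels untouched
    # into the underlying pandas parser, just as pandas_kwargs does above.
    return parser_func(buf, **pandas_kwargs)

df = read_text_sketch(pd.read_csv, io.StringIO("a;b\n1;2\n3;4\n"), sep=";")
print(df)  # two rows, columns a and b
```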
@@ -156,7 +156,7 @@ def _read_text_chunksize(
     paths: List[str],
     boto3_session: boto3.Session,
     chunksize: int,
-    pandas_args: Dict[str, Any],
+    pandas_kwargs: Dict[str, Any],
     s3_additional_kwargs: Optional[Dict[str, str]] = None,
     dataset: bool = False,
 ) -> Iterator[pd.DataFrame]:
@@ -166,9 +166,9 @@ def _read_text_chunksize(
         partitions: Dict[str, Any] = {}
         if dataset is True:
             partitions = _utils.extract_partitions_from_path(path_root=path_root, path=path)
-        if pandas_args.get("compression", "infer") == "infer":
-            pandas_args["compression"] = infer_compression(path, compression="infer")
-        mode: str = "r" if pandas_args.get("compression") is None else "rb"
+        if pandas_kwargs.get("compression", "infer") == "infer":
+            pandas_kwargs["compression"] = infer_compression(path, compression="infer")
+        mode: str = "r" if pandas_kwargs.get("compression") is None else "rb"
         with fs.open(path, mode) as f:
-            reader: pandas.io.parsers.TextFileReader = parser_func(f, chunksize=chunksize, **pandas_args)
+            reader: pandas.io.parsers.TextFileReader = parser_func(f, chunksize=chunksize, **pandas_kwargs)
             for df in reader:
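
The renamed dict also drives compression inference and the choice of open mode. A small sketch of that logic in isolation, using pandas' own `infer_compression` helper (it lives in the semi-private `pandas.io.common` module, so treat the import as illustrative rather than a stable API):

```python
from pandas.io.common import infer_compression

for path in ["s3://bucket/data.csv", "s3://bucket/data.csv.gz"]:
    compression = infer_compression(path, compression="infer")  # None or "gzip", from the extension
    mode = "r" if compression is None else "rb"                 # text open vs. binary open
    print(path, compression, mode)
```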
@@ -183,18 +183,18 @@ def _read_text_full(
     path_root: str,
     path: str,
     boto3_session: Union[boto3.Session, Dict[str, Optional[str]]],
-    pandas_args: Dict[str, Any],
+    pandas_kwargs: Dict[str, Any],
     s3_additional_kwargs: Optional[Dict[str, str]] = None,
     dataset: bool = False,
 ) -> pd.DataFrame:
     fs: s3fs.S3FileSystem = _utils.get_fs(session=boto3_session, s3_additional_kwargs=s3_additional_kwargs)
-    if pandas_args.get("compression", "infer") == "infer":
-        pandas_args["compression"] = infer_compression(path, compression="infer")
-    mode: str = "r" if pandas_args.get("compression") is None else "rb"
-    encoding: Optional[str] = pandas_args.get("encoding", None)
-    newline: Optional[str] = pandas_args.get("lineterminator", None)
+    if pandas_kwargs.get("compression", "infer") == "infer":
+        pandas_kwargs["compression"] = infer_compression(path, compression="infer")
+    mode: str = "r" if pandas_kwargs.get("compression") is None else "rb"
+    encoding: Optional[str] = pandas_kwargs.get("encoding", None)
+    newline: Optional[str] = pandas_kwargs.get("lineterminator", None)
     with fs.open(path=path, mode=mode, encoding=encoding, newline=newline) as f:
-        df: pd.DataFrame = parser_func(f, **pandas_args)
+        df: pd.DataFrame = parser_func(f, **pandas_kwargs)
     if dataset is True:
         partitions: Dict[str, Any] = _utils.extract_partitions_from_path(path_root=path_root, path=path)
         for column_name, value in partitions.items():
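
In `_read_text_full`, after the parser runs, partition columns recovered from the object key are appended to the frame. A rough sketch, assuming hive-style `column=value` path segments; `extract_partitions_from_path` below is a hypothetical stand-in for the `_utils` helper, not its real implementation:

```python
from typing import Any, Dict
import pandas as pd

def extract_partitions_from_path(path_root: str, path: str) -> Dict[str, Any]:
    # Hypothetical stand-in: collect "column=value" segments between the
    # dataset root and the file name.
    relative = path[len(path_root):]
    parts = [p for p in relative.split("/") if "=" in p]
    return dict(p.split("=", 1) for p in parts)

df = pd.DataFrame({"x": [1, 2]})
partitions = extract_partitions_from_path(
    path_root="s3://bucket/dataset/",
    path="s3://bucket/dataset/year=2020/month=05/file0.csv",
)
for column_name, value in partitions.items():
    df[column_name] = value  # partition value broadcast onto every row
print(df)
```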