Commit 3e4f2e5

Merge pull request #174 from awslabs/boto3-session-missing

Fix missing boto3_session argument #172

2 parents: 1b33269 + 6a55fdc
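
For context, this is roughly how the fixed code path is exercised. A minimal sketch, not taken from the repository: the bucket, database, table, and profile names are placeholders, and the dataset=True flag is assumed from the surrounding API rather than shown in this diff.

import boto3
import pandas as pd
import awswrangler as wr

# A non-default session (e.g. a named profile or an assumed role).
session = boto3.Session(profile_name="analytics")  # placeholder profile name

df = pd.DataFrame({"id": [1, 2], "value": ["a", "b"]})

# Before this fix, to_parquet accepted boto3_session but did not forward it
# to every internal call, so some S3/Glue operations silently fell back to
# the default credentials chain.
wr.s3.to_parquet(
    df=df,
    path="s3://my-bucket/my-dataset/",  # placeholder path
    dataset=True,
    database="my_database",  # placeholder Glue database
    table="my_table",  # placeholder Glue table
    boto3_session=session,
)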

1 file changed: awswrangler/s3.py (+6 additions, -5 deletions)
@@ -694,12 +694,13 @@ def to_parquet(  # pylint: disable=too-many-arguments
     )
     if df.empty is True:
         raise exceptions.EmptyDataFrame()
+    session: boto3.Session = _utils.ensure_session(session=boto3_session)
     partition_cols = partition_cols if partition_cols else []
     dtype = dtype if dtype else {}
     columns_comments = columns_comments if columns_comments else {}
     partitions_values: Dict[str, List[str]] = {}
     cpus: int = _utils.ensure_cpu_count(use_threads=use_threads)
-    fs: s3fs.S3FileSystem = _utils.get_fs(session=boto3_session, s3_additional_kwargs=s3_additional_kwargs)
+    fs: s3fs.S3FileSystem = _utils.get_fs(session=session, s3_additional_kwargs=s3_additional_kwargs)
     compression_ext: Optional[str] = _COMPRESSION_2_EXT.get(compression, None)
     if compression_ext is None:
         raise exceptions.InvalidCompression(f"{compression} is invalid, please use None, snappy or gzip.")
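
The key line in this hunk is the new session assignment: to_parquet now resolves the optional boto3_session argument into a concrete boto3.Session once, up front, and every call below uses the resolved session. The actual _utils.ensure_session implementation is not part of this diff; a minimal sketch of what such a helper typically looks like:

from typing import Optional

import boto3

def ensure_session(session: Optional[boto3.Session] = None) -> boto3.Session:
    """Return the given session, or fall back to a default boto3.Session."""
    if session is not None:
        return session
    return boto3.Session()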
@@ -739,7 +740,7 @@ def to_parquet(  # pylint: disable=too-many-arguments
             partition_cols=partition_cols,
             dtype=dtype,
             mode=mode,
-            boto3_session=boto3_session,
+            boto3_session=session,
         )
         if (database is not None) and (table is not None):
             columns_types, partitions_types = _data_types.athena_types_from_pandas_partitioned(
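
The next two hunks apply the same one-word substitution, passing the resolved session instead of the raw boto3_session argument to the calls further down in to_parquet (judging by the keyword arguments, table creation and partition registration):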
@@ -755,7 +756,7 @@ def to_parquet(  # pylint: disable=too-many-arguments
                 description=description,
                 parameters=parameters,
                 columns_comments=columns_comments,
-                boto3_session=boto3_session,
+                boto3_session=session,
                 mode="overwrite",
             )
             if partitions_values:
@@ -765,7 +766,7 @@ def to_parquet(  # pylint: disable=too-many-arguments
                     table=table,
                     partitions_values=partitions_values,
                     compression=compression,
-                    boto3_session=boto3_session,
+                    boto3_session=session,
                 )
     return {"paths": paths, "partitions_values": partitions_values}
@@ -811,7 +812,7 @@ def _to_parquet_dataset(
         subdir = "/".join([f"{name}={val}" for name, val in zip(partition_cols, keys)])
         prefix: str = f"{path}{subdir}/"
         if mode == "overwrite_partitions":
-            delete_objects(path=prefix, use_threads=use_threads)
+            delete_objects(path=prefix, use_threads=use_threads, boto3_session=boto3_session)
         file_path = f"{prefix}{uuid.uuid4().hex}{compression_ext}.parquet"
         _to_parquet_file(
             df=subgroup,
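
This final hunk fixes the same class of bug one level down: in overwrite_partitions mode, _to_parquet_dataset called delete_objects without forwarding the caller's session, so the delete ran under the default credentials chain even when the write did not. The pattern the commit enforces, as a minimal sketch with illustrative names (public_entry, _write_data, and _register_metadata are not awswrangler functions):

from typing import Optional

import boto3

def public_entry(boto3_session: Optional[boto3.Session] = None) -> None:
    """Resolve the optional session once, at the public entry point."""
    session = boto3_session if boto3_session is not None else boto3.Session()
    _write_data(session=session)  # forward the resolved session...
    _register_metadata(session=session)  # ...to every helper, no silent fallbacks

def _write_data(session: boto3.Session) -> None:
    # Every AWS client is built from the session that was passed in,
    # never from a fresh boto3.Session() or a module-level default.
    session.client("s3", region_name="us-east-1")  # placeholder region

def _register_metadata(session: boto3.Session) -> None:
    # Same rule for catalog/metadata operations.
    session.client("glue", region_name="us-east-1")  # placeholder region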
