Commit 8a37b68

danielwoigorborgest authored and committed
[feat] added raw S3 credential authentication for Redshift COPY commands
1 parent 8073390 commit 8a37b68

awswrangler/redshift.py

Lines changed: 62 additions & 2 deletions
@@ -91,6 +91,9 @@ def _copy(
     path: str,
     table: str,
     iam_role: Optional[str] = None,
+    aws_access_key_id: Optional[str] = None,
+    aws_secret_access_key: Optional[str] = None,
+    aws_session_token: Optional[str] = None,
     boto3_session: Optional[str] = None,
     schema: Optional[str] = None,
 ) -> None:
@@ -99,7 +102,13 @@ def _copy(
     else:
         table_name = f'"{schema}"."{table}"'
 
-    auth_str: str = _make_s3_auth_string(iam_role=iam_role, boto3_session=boto3_session)
+    auth_str: str = _make_s3_auth_string(
+        iam_role=iam_role,
+        aws_access_key_id=aws_access_key_id,
+        aws_secret_access_key=aws_secret_access_key,
+        aws_session_token=aws_session_token,
+        boto3_session=boto3_session,
+    )
     sql: str = f"COPY {table_name} FROM '{path}'{auth_str}\nFORMAT AS PARQUET"
     _logger.debug("copy query:\n%s", sql)
     cursor.execute(sql)
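
The COPY statement built here embeds whichever authorization clause _make_s3_auth_string returns. The helper's body is not part of this diff; the following is a minimal, hypothetical sketch of its key-based branch, assuming it maps the new arguments onto Redshift's standard ACCESS_KEY_ID / SECRET_ACCESS_KEY / SESSION_TOKEN authorization parameters:

from typing import Optional

import boto3


def _make_s3_auth_string_sketch(
    iam_role: Optional[str] = None,
    aws_access_key_id: Optional[str] = None,
    aws_secret_access_key: Optional[str] = None,
    aws_session_token: Optional[str] = None,
    boto3_session: Optional[boto3.Session] = None,
) -> str:
    # Hypothetical reconstruction -- the real helper is not shown in this commit.
    if iam_role is not None:
        # Role-based authorization (pre-existing behavior).
        return f"\nIAM_ROLE '{iam_role}'"
    if aws_access_key_id is not None and aws_secret_access_key is not None:
        # Key-based authorization enabled by this commit.
        auth: str = f"\nACCESS_KEY_ID '{aws_access_key_id}'\nSECRET_ACCESS_KEY '{aws_secret_access_key}'"
        if aws_session_token is not None:
            auth += f"\nSESSION_TOKEN '{aws_session_token}'"
        return auth
    # Resolving credentials from boto3_session is plausible but omitted here.
    raise ValueError("Either iam_role or raw S3 credentials are required.")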
@@ -742,6 +751,9 @@ def unload_to_files(
     path: str,
     con: redshift_connector.Connection,
     iam_role: Optional[str] = None,
+    aws_access_key_id: Optional[str] = None,
+    aws_secret_access_key: Optional[str] = None,
+    aws_session_token: Optional[str] = None,
     region: Optional[str] = None,
     max_file_size: Optional[float] = None,
     kms_key_id: Optional[str] = None,
@@ -770,6 +782,12 @@ def unload_to_files(
         "credentials directly or wr.redshift.connect() to fetch it from the Glue Catalog.
     iam_role : str, optional
         AWS IAM role with the related permissions.
+    aws_access_key_id : str, optional
+        The access key for your AWS account.
+    aws_secret_access_key : str, optional
+        The secret key for your AWS account.
+    aws_session_token : str, optional
+        The session key for your AWS account. This is only needed when you are using temporary credentials.
     region : str, optional
         Specifies the AWS Region where the target Amazon S3 bucket is located.
         REGION is required for UNLOAD to an Amazon S3 bucket that isn't in the
@@ -821,7 +839,13 @@ def unload_to_files(
     max_file_size_str: str = f"\nMAXFILESIZE AS {max_file_size} MB" if max_file_size is not None else ""
     kms_key_id_str: str = f"\nKMS_KEY_ID '{kms_key_id}'" if kms_key_id is not None else ""
 
-    auth_str: str = _make_s3_auth_string(iam_role=iam_role, boto3_session=boto3_session)
+    auth_str: str = _make_s3_auth_string(
+        iam_role=iam_role,
+        aws_access_key_id=aws_access_key_id,
+        aws_secret_access_key=aws_secret_access_key,
+        aws_session_token=aws_session_token,
+        boto3_session=boto3_session,
+    )
 
     sql = (
         f"UNLOAD ('{sql}')\n"
@@ -846,6 +870,9 @@ def unload(
     path: str,
     con: redshift_connector.Connection,
     iam_role: Optional[str],
+    aws_access_key_id: Optional[str] = None,
+    aws_secret_access_key: Optional[str] = None,
+    aws_session_token: Optional[str] = None,
     region: Optional[str] = None,
     max_file_size: Optional[float] = None,
     kms_key_id: Optional[str] = None,
@@ -900,6 +927,12 @@ def unload(
         "credentials directly or wr.redshift.connect() to fetch it from the Glue Catalog.
     iam_role : str, optional
         AWS IAM role with the related permissions.
+    aws_access_key_id : str, optional
+        The access key for your AWS account.
+    aws_secret_access_key : str, optional
+        The secret key for your AWS account.
+    aws_session_token : str, optional
+        The session key for your AWS account. This is only needed when you are using temporary credentials.
     region : str, optional
         Specifies the AWS Region where the target Amazon S3 bucket is located.
         REGION is required for UNLOAD to an Amazon S3 bucket that isn't in the
@@ -954,6 +987,9 @@ def unload(
         path=path,
         con=con,
         iam_role=iam_role,
+        aws_access_key_id=aws_access_key_id,
+        aws_secret_access_key=aws_secret_access_key,
+        aws_session_token=aws_session_token,
         region=region,
         max_file_size=max_file_size,
         kms_key_id=kms_key_id,
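
unload itself does not build the auth string; as the hunk above shows, it simply forwards the new keyword arguments to unload_to_files. Note that iam_role is declared without a default on unload, so a caller relying on raw keys passes it explicitly as None (placeholders as in the previous sketch):

df = wr.redshift.unload(
    sql="SELECT * FROM public.my_table",
    path="s3://my-bucket/unload/",
    con=con,
    iam_role=None,  # no role: authorize with the raw keys below
    aws_access_key_id="AKIAXXXXXXXXXXXXXXXX",
    aws_secret_access_key="xxxxxxxx",
)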
@@ -991,6 +1027,9 @@ def copy_from_files(  # pylint: disable=too-many-locals,too-many-arguments
     table: str,
     schema: str,
     iam_role: Optional[str] = None,
+    aws_access_key_id: Optional[str] = None,
+    aws_secret_access_key: Optional[str] = None,
+    aws_session_token: Optional[str] = None,
     parquet_infer_sampling: float = 1.0,
     mode: str = "append",
     diststyle: str = "AUTO",
@@ -1035,6 +1074,12 @@ def copy_from_files(  # pylint: disable=too-many-locals,too-many-arguments
         Schema name
     iam_role : str, optional
         AWS IAM role with the related permissions.
+    aws_access_key_id : str, optional
+        The access key for your AWS account.
+    aws_secret_access_key : str, optional
+        The secret key for your AWS account.
+    aws_session_token : str, optional
+        The session key for your AWS account. This is only needed when you are using temporary credentials.
     parquet_infer_sampling : float
         Random sample ratio of files that will have the metadata inspected.
         Must be `0.0 < sampling <= 1.0`.
@@ -1130,6 +1175,9 @@ def copy_from_files(  # pylint: disable=too-many-locals,too-many-arguments
         table=created_table,
         schema=created_schema,
         iam_role=iam_role,
+        aws_access_key_id=aws_access_key_id,
+        aws_secret_access_key=aws_secret_access_key,
+        aws_session_token=aws_session_token,
         boto3_session=boto3_session,
     )
     if table != created_table:  # upsert
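
The same forwarding pattern applies in copy_from_files: the credentials flow through to the internal _copy call shown at the top of this diff. A usage sketch with placeholder values, assuming the prefix already holds Parquet files:

wr.redshift.copy_from_files(
    path="s3://my-bucket/staged-parquet/",  # existing Parquet files to load
    con=con,
    table="my_table",
    schema="public",
    aws_access_key_id="AKIAXXXXXXXXXXXXXXXX",
    aws_secret_access_key="xxxxxxxx",
)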
@@ -1150,6 +1198,9 @@ def copy(  # pylint: disable=too-many-arguments
     table: str,
     schema: str,
     iam_role: Optional[str] = None,
+    aws_access_key_id: Optional[str] = None,
+    aws_secret_access_key: Optional[str] = None,
+    aws_session_token: Optional[str] = None,
     index: bool = False,
     dtype: Optional[Dict[str, str]] = None,
     mode: str = "append",
@@ -1205,6 +1256,12 @@ def copy(  # pylint: disable=too-many-arguments
         Schema name
     iam_role : str, optional
         AWS IAM role with the related permissions.
+    aws_access_key_id : str, optional
+        The access key for your AWS account.
+    aws_secret_access_key : str, optional
+        The secret key for your AWS account.
+    aws_session_token : str, optional
+        The session key for your AWS account. This is only needed when you are using temporary credentials.
     index : bool
         True to store the DataFrame index in file, otherwise False to ignore it.
     dtype: Dict[str, str], optional
@@ -1293,6 +1350,9 @@ def copy(  # pylint: disable=too-many-arguments
         table=table,
         schema=schema,
         iam_role=iam_role,
+        aws_access_key_id=aws_access_key_id,
+        aws_secret_access_key=aws_secret_access_key,
+        aws_session_token=aws_session_token,
         mode=mode,
         diststyle=diststyle,
         distkey=distkey,
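
Finally, copy passes the credentials down to copy_from_files after staging the DataFrame as Parquet on S3. A usage sketch with placeholder values:

import pandas as pd

df = pd.DataFrame({"id": [1, 2], "name": ["foo", "boo"]})

wr.redshift.copy(
    df=df,
    path="s3://my-bucket/stage/",  # staging prefix for intermediate Parquet files
    con=con,
    table="my_table",
    schema="public",
    aws_access_key_id="AKIAXXXXXXXXXXXXXXXX",
    aws_secret_access_key="xxxxxxxx",
)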
