Skip to content

Commit cb3ad18

Browse files
authored
Add tests for the new redshift credentials approaches. (#490)
1 parent 8a37b68 commit cb3ad18

File tree

2 files changed

+96
-1
lines changed

2 files changed

+96
-1
lines changed

awswrangler/redshift.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -869,7 +869,7 @@ def unload(
869869
sql: str,
870870
path: str,
871871
con: redshift_connector.Connection,
872-
iam_role: Optional[str],
872+
iam_role: Optional[str] = None,
873873
aws_access_key_id: Optional[str] = None,
874874
aws_secret_access_key: Optional[str] = None,
875875
aws_session_token: Optional[str] = None,

tests/test_redshift.py

Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
import redshift_connector
1111

1212
import awswrangler as wr
13+
from awswrangler import _utils
1314

1415
from ._utils import dt, ensure_data_types, ensure_data_types_category, get_df, get_df_category, ts
1516

@@ -776,3 +777,97 @@ def test_connect_secret_manager(dbname):
776777
df = wr.redshift.read_sql_query("SELECT 1", con=con)
777778
con.close()
778779
assert df.shape == (1, 1)
780+
781+
782+
def test_copy_unload_session(path, redshift_table):
    """Round-trip copy/unload using only the default session credentials.

    Neither ``iam_role`` nor explicit access keys are supplied, exercising the
    path where credentials are resolved from the active boto3 session
    (``iam_role`` now defaults to ``None``).
    """
    frame = pd.DataFrame({"col0": [1, 2, 3]})
    con = wr.redshift.connect("aws-data-wrangler-redshift")
    query = f"SELECT * FROM public.{redshift_table}"

    # First load replaces any existing table contents.
    wr.redshift.copy(df=frame, path=path, con=con, schema="public", table=redshift_table, mode="overwrite")
    out = wr.redshift.unload(sql=query, con=con, path=path, keep_files=False)
    assert out.shape == (3, 1)

    # Second load appends, doubling the row count.
    wr.redshift.copy(
        df=frame,
        path=path,
        con=con,
        schema="public",
        table=redshift_table,
        mode="append",
    )
    out = wr.redshift.unload(sql=query, con=con, path=path, keep_files=False)
    assert out.shape == (6, 1)

    # Chunked unload yields an iterable of single-column frames.
    for piece in wr.redshift.unload(sql=query, con=con, path=path, keep_files=False, chunked=True):
        assert len(piece.columns) == 1
    con.close()
818+
819+
820+
def test_copy_unload_creds(path, redshift_table):
    """Round-trip copy/unload passing explicit access key / secret key.

    Same sequence as the session-credentials test, but the key pair is pulled
    from the current botocore session via ``_utils.get_credentials_from_session``
    and forwarded to every ``copy``/``unload`` call.
    """
    creds = _utils.get_credentials_from_session()
    # Same key pair threaded through every copy/unload call below.
    key_kwargs = {
        "aws_access_key_id": creds.access_key,
        "aws_secret_access_key": creds.secret_key,
    }
    frame = pd.DataFrame({"col0": [1, 2, 3]})
    con = wr.redshift.connect("aws-data-wrangler-redshift")
    query = f"SELECT * FROM public.{redshift_table}"

    # Overwrite, then verify the three rows come back.
    wr.redshift.copy(
        df=frame,
        path=path,
        con=con,
        schema="public",
        table=redshift_table,
        mode="overwrite",
        **key_kwargs,
    )
    out = wr.redshift.unload(sql=query, con=con, path=path, keep_files=False, **key_kwargs)
    assert out.shape == (3, 1)

    # Append the same frame; row count doubles.
    wr.redshift.copy(
        df=frame,
        path=path,
        con=con,
        schema="public",
        table=redshift_table,
        mode="append",
        **key_kwargs,
    )
    out = wr.redshift.unload(sql=query, con=con, path=path, keep_files=False, **key_kwargs)
    assert out.shape == (6, 1)

    # Chunked unload with explicit keys yields single-column frames.
    for piece in wr.redshift.unload(
        sql=query,
        con=con,
        path=path,
        keep_files=False,
        chunked=True,
        **key_kwargs,
    ):
        assert len(piece.columns) == 1
    con.close()

0 commit comments

Comments
 (0)