
Commit f1a8c17
Fix Redshift unload breaking with hyphenated table names (#1762)
1 parent 65dc513

3 files changed: +38 −10 lines
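For context, a minimal sketch of the scenario this commit addresses, assuming the 2.x wr.redshift.copy API; the table name, S3 path, IAM role, and bucket below are placeholders, not values from this repository. With a hyphenated table name, the upsert path previously emitted SQL that referenced the staging table without double quotes, which Redshift rejects.

# Hypothetical reproduction sketch; identifiers and AWS resources are placeholders.
# Before this commit, the upsert path generated SQL such as
#   DELETE FROM "public"."my-table" USING my-table_temp_x WHERE ...
# and Redshift fails to parse the unquoted, hyphenated staging-table reference.
import awswrangler as wr
import pandas as pd

df = pd.DataFrame({"id": [1, 2], "val": ["foo", "boo"]})
con = wr.redshift.connect("aws-sdk-pandas-redshift")  # Glue connection name used by the test suite
wr.redshift.copy(
    df=df,
    path="s3://my-bucket/staging/",  # placeholder staging prefix
    con=con,
    schema="public",
    table="my-table",  # hyphenated name that used to break the upsert
    mode="upsert",
    primary_keys=["id"],
    iam_role="arn:aws:iam::123456789012:role/my-redshift-copy-role",  # placeholder
)
con.close()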

awswrangler/redshift.py
Lines changed: 8 additions & 6 deletions

@@ -184,30 +184,32 @@ def _upsert(
     _logger.debug("primary_keys: %s", primary_keys)
     if not primary_keys:
         raise exceptions.InvalidRedshiftPrimaryKeys()
-    equals_clause: str = f"{table}.%s = {temp_table}.%s"
+    equals_clause: str = f'"{table}".%s = "{temp_table}".%s'
     join_clause: str = " AND ".join([equals_clause % (pk, pk) for pk in primary_keys])
     if precombine_key:
         delete_from_target_filter: str = f"AND {table}.{precombine_key} <= {temp_table}.{precombine_key}"
         delete_from_temp_filter: str = f"AND {table}.{precombine_key} > {temp_table}.{precombine_key}"
         target_del_sql: str = (
-            f'DELETE FROM "{schema}"."{table}" USING {temp_table} WHERE {join_clause} {delete_from_target_filter}'
+            f'DELETE FROM "{schema}"."{table}" USING "{temp_table}" WHERE {join_clause} {delete_from_target_filter}'
         )
         _logger.debug(target_del_sql)
         cursor.execute(target_del_sql)
         source_del_sql: str = (
-            f'DELETE FROM {temp_table} USING "{schema}"."{table}" WHERE {join_clause} {delete_from_temp_filter}'
+            f'DELETE FROM "{temp_table}" USING "{schema}"."{table}" WHERE {join_clause} {delete_from_temp_filter}'
         )
         _logger.debug(source_del_sql)
         cursor.execute(source_del_sql)
     else:
-        sql: str = f'DELETE FROM "{schema}"."{table}" USING {temp_table} WHERE {join_clause}'
+        sql: str = f'DELETE FROM "{schema}"."{table}" USING "{temp_table}" WHERE {join_clause}'
         _logger.debug(sql)
         cursor.execute(sql)
     if column_names:
         column_names_str = ",".join(column_names)
-        insert_sql = f'INSERT INTO "{schema}"."{table}"({column_names_str}) SELECT {column_names_str} FROM {temp_table}'
+        insert_sql = (
+            f'INSERT INTO "{schema}"."{table}"({column_names_str}) SELECT {column_names_str} FROM "{temp_table}"'
+        )
     else:
-        insert_sql = f'INSERT INTO "{schema}"."{table}" SELECT * FROM {temp_table}'
+        insert_sql = f'INSERT INTO "{schema}"."{table}" SELECT * FROM "{temp_table}"'
     _logger.debug(insert_sql)
     cursor.execute(insert_sql)
     _drop_table(cursor=cursor, schema=schema, table=temp_table)
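As a standalone illustration of why the quoting matters (this is not library code; the names are made up), compare the DELETE statements produced with and without quoting the staging table:

# Illustration only: how quoting the staging table changes the generated DELETE
# for a hyphenated name. All identifiers here are invented for the example.
schema, table, temp_table = "public", "my-table", "my-table_temporary_abc"
join_clause = f'"{table}".id = "{temp_table}".id'

old_sql = f'DELETE FROM "{schema}"."{table}" USING {temp_table} WHERE {join_clause}'
new_sql = f'DELETE FROM "{schema}"."{table}" USING "{temp_table}" WHERE {join_clause}'

# old_sql: ... USING my-table_temporary_abc ...   -> Redshift reads the hyphen as a
#          minus operator and raises a syntax error.
# new_sql: ... USING "my-table_temporary_abc" ... -> quoted identifier, accepted.
print(old_sql)
print(new_sql)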

tests/conftest.py
Lines changed: 12 additions & 0 deletions

@@ -251,6 +251,18 @@ def redshift_table():
     con.close()
 
 
+@pytest.fixture(scope="function")
+def redshift_table_with_hyphenated_name():
+    name = f"tbl-{get_time_str_with_random_suffix()}"
+    print(f"Table name: {name}")
+    yield name
+    con = wr.redshift.connect("aws-sdk-pandas-redshift")
+    with con.cursor() as cursor:
+        cursor.execute(f'DROP TABLE IF EXISTS public."{name}"')
+        con.commit()
+    con.close()
+
+
 @pytest.fixture(scope="function")
 def postgresql_table():
     name = f"tbl_{get_time_str_with_random_suffix()}"

tests/test_redshift.py
Lines changed: 18 additions & 4 deletions

@@ -3,6 +3,7 @@
 import random
 import string
 from decimal import Decimal
+from typing import Any, Dict
 
 import boto3
 import numpy as np
@@ -164,7 +165,12 @@ def test_copy_unload(path, redshift_table, redshift_con, databases_parameters):
         ensure_data_types(df=chunk, has_list=False)
 
 
-def test_copy_upsert(path, redshift_table, redshift_con, databases_parameters):
+def generic_test_copy_upsert(
+    path: str,
+    redshift_table: str,
+    redshift_con: redshift_connector.Connection,
+    databases_parameters: Dict[str, Any],
+) -> None:
     df = pd.DataFrame({"id": list((range(1_000))), "val": list(["foo" if i % 2 == 0 else "boo" for i in range(1_000)])})
     df3 = pd.DataFrame(
         {"id": list((range(1_000, 1_500))), "val": list(["foo" if i % 2 == 0 else "boo" for i in range(500)])}
@@ -185,7 +191,7 @@ def test_copy_upsert(path, redshift_table, redshift_con, databases_parameters):
     )
     path = f"{path}upsert/test_redshift_copy_upsert2/"
     df2 = wr.redshift.unload(
-        sql=f"SELECT * FROM public.{redshift_table}",
+        sql=f'SELECT * FROM public."{redshift_table}"',
         con=redshift_con,
         iam_role=databases_parameters["redshift"]["role"],
         path=path,
@@ -209,7 +215,7 @@ def test_copy_upsert(path, redshift_table, redshift_con, databases_parameters):
     )
     path = f"{path}upsert/test_redshift_copy_upsert4/"
     df4 = wr.redshift.unload(
-        sql=f"SELECT * FROM public.{redshift_table}",
+        sql=f'SELECT * FROM public."{redshift_table}"',
         con=redshift_con,
         iam_role=databases_parameters["redshift"]["role"],
         path=path,
@@ -232,7 +238,7 @@ def test_copy_upsert(path, redshift_table, redshift_con, databases_parameters):
     )
     path = f"{path}upsert/test_redshift_copy_upsert4/"
     df4 = wr.redshift.unload(
-        sql=f"SELECT * FROM public.{redshift_table}",
+        sql=f'SELECT * FROM public."{redshift_table}"',
         con=redshift_con,
         iam_role=databases_parameters["redshift"]["role"],
         path=path,
@@ -242,6 +248,14 @@ def test_copy_upsert(path, redshift_table, redshift_con, databases_parameters):
     assert len(df.columns) == len(df4.columns)
 
 
+def test_copy_upsert(path, redshift_table, redshift_con, databases_parameters):
+    generic_test_copy_upsert(path, redshift_table, redshift_con, databases_parameters)
+
+
+def test_copy_upsert_hyphenated_name(path, redshift_table_with_hyphenated_name, redshift_con, databases_parameters):
+    generic_test_copy_upsert(path, redshift_table_with_hyphenated_name, redshift_con, databases_parameters)
+
+
 @pytest.mark.parametrize(
     "diststyle,distkey,exc,sortstyle,sortkey",
     [
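A usage sketch mirroring the updated tests: callers unloading from a table with a hyphen in its name must double-quote the identifier inside the SQL text, exactly as the test's unload call now does. The connection name, table name, IAM role, and S3 path below are placeholders.

# Sketch of unloading from a hyphenated table; all resource names are placeholders.
import awswrangler as wr

con = wr.redshift.connect("aws-sdk-pandas-redshift")
df = wr.redshift.unload(
    sql='SELECT * FROM public."tbl-20230101-abc123"',  # hypothetical hyphenated table, quoted in the SQL
    con=con,
    iam_role="arn:aws:iam::123456789012:role/my-redshift-unload-role",  # placeholder
    path="s3://my-bucket/unload/",  # placeholder
    keep_files=False,
)
con.close()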
