Skip to content

Commit 9dd6b70

Browse files
Change to bindings
1 parent b2fa419 commit 9dd6b70

File tree

3 files changed

+45
-28
lines changed

3 files changed

+45
-28
lines changed

DESCRIPTION.md

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -9,7 +9,7 @@ Source code is also available at: https://github.com/snowflakedb/snowflake-conne
99
# Release Notes
1010
- v3.16(TBD)
1111
- Added basic arrow support for Interval types.
12-
- Fix `write_pandas` special characters usage in the location name.
12+
- Fix `write_pandas` special characters usage in the location name.
1313

1414
- v3.15.0(Apr 29,2025)
1515
- Bumped up min boto and botocore version to 1.24.

src/snowflake/connector/pandas_tools.py

Lines changed: 38 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -68,7 +68,6 @@ def build_location_helper(
6868

6969
def _escape_part_location(part: str, should_quote: bool) -> str:
7070
if "'" in part:
71-
part = part.replace("'", "\\'")
7271
should_quote = True
7372
if should_quote:
7473
if not part.startswith('"'):
@@ -87,12 +86,14 @@ def _do_create_temp_stage(
8786
overwrite: bool,
8887
use_scoped_temp_object: bool,
8988
) -> None:
90-
create_stage_sql = f"CREATE {get_temp_type_for_object(use_scoped_temp_object)} STAGE /* Python:snowflake.connector.pandas_tools.write_pandas() */ identifier('{stage_location}') FILE_FORMAT=(TYPE=PARQUET COMPRESSION={compression}{' BINARY_AS_TEXT=FALSE' if auto_create_table or overwrite else ''})"
91-
logger.debug(f"creating stage with '{create_stage_sql}'.")
89+
create_stage_sql = f"CREATE {get_temp_type_for_object(use_scoped_temp_object)} STAGE /* Python:snowflake.connector.pandas_tools.write_pandas() */ identifier(?) FILE_FORMAT=(TYPE=PARQUET COMPRESSION={compression}{' BINARY_AS_TEXT=FALSE' if auto_create_table or overwrite else ''})"
90+
params = (stage_location,)
91+
logger.debug(f"creating stage with '{create_stage_sql}'. params: %s", params)
9292
cursor.execute(
9393
create_stage_sql,
9494
_is_internal=True,
9595
_force_qmark_paramstyle=True,
96+
params=params,
9697
num_statements=1,
9798
)
9899

@@ -154,15 +155,17 @@ def _do_create_temp_file_format(
154155
use_scoped_temp_object: bool,
155156
) -> None:
156157
file_format_sql = (
157-
f"CREATE {get_temp_type_for_object(use_scoped_temp_object)} FILE FORMAT identifier('{file_format_location}') "
158+
f"CREATE {get_temp_type_for_object(use_scoped_temp_object)} FILE FORMAT identifier(?) "
158159
f"/* Python:snowflake.connector.pandas_tools.write_pandas() */ "
159160
f"TYPE=PARQUET COMPRESSION={compression}{sql_use_logical_type}"
160161
)
161-
logger.debug(f"creating file format with '{file_format_sql}'.")
162+
params = (file_format_location,)
163+
logger.debug(f"creating file format with '{file_format_sql}'. params: %s", params)
162164
cursor.execute(
163165
file_format_sql,
164166
_is_internal=True,
165167
_force_qmark_paramstyle=True,
168+
params=params,
166169
num_statements=1,
167170
)
168171

@@ -442,17 +445,18 @@ def write_pandas(
442445
# Upload parquet file
443446
upload_sql = (
444447
"PUT /* Python:snowflake.connector.pandas_tools.write_pandas() */ "
445-
"'file://{path}' '{stage_location}' PARALLEL={parallel}"
448+
"'file://{path}' ? PARALLEL={parallel}"
446449
).format(
447450
path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"),
448-
stage_location="@" + stage_location,
449451
parallel=parallel,
450452
)
451-
logger.debug(f"uploading files with '{upload_sql}'")
453+
params = ("@" + stage_location,)
454+
logger.debug(f"uploading files with '{upload_sql}', params: %s", params)
452455
cursor.execute(
453456
upload_sql,
454457
_is_internal=True,
455458
_force_qmark_paramstyle=True,
459+
params=params,
456460
num_statements=1,
457461
)
458462
# Remove chunk file
@@ -471,12 +475,15 @@ def write_pandas(
471475
columns = quote + f"{quote},{quote}".join(snowflake_column_names) + quote
472476

473477
def drop_object(name: str, object_type: str) -> None:
474-
drop_sql = f"DROP {object_type.upper()} IF EXISTS identifier('{name}') /* Python:snowflake.connector.pandas_tools.write_pandas() */"
475-
logger.debug(f"dropping {object_type} with '{drop_sql}'.")
478+
drop_sql = f"DROP {object_type.upper()} IF EXISTS identifier(?) /* Python:snowflake.connector.pandas_tools.write_pandas() */"
479+
params = (name,)
480+
logger.debug(f"dropping {object_type} with '{drop_sql}'. params: %s", params)
481+
476482
cursor.execute(
477483
drop_sql,
478484
_is_internal=True,
479485
_force_qmark_paramstyle=True,
486+
params=params,
480487
num_statements=1,
481488
)
482489

@@ -525,15 +532,19 @@ def drop_object(name: str, object_type: str) -> None:
525532
)
526533

527534
create_table_sql = (
528-
f"CREATE {table_type.upper()} {iceberg}TABLE IF NOT EXISTS identifier('{target_table_location}') "
535+
f"CREATE {table_type.upper()} {iceberg}TABLE IF NOT EXISTS identifier(?) "
529536
f"({create_table_columns}) {iceberg_config_statement}"
530537
f" /* Python:snowflake.connector.pandas_tools.write_pandas() */ "
531538
)
532-
logger.debug(f"auto creating table with '{create_table_sql}'.")
539+
params = (target_table_location,)
540+
logger.debug(
541+
f"auto creating table with '{create_table_sql}'. params: %s", params
542+
)
533543
cursor.execute(
534544
create_table_sql,
535545
_is_internal=True,
536546
_force_qmark_paramstyle=True,
547+
params=params,
537548
num_statements=1,
538549
)
539550
# need explicit casting when the underlying table schema is inferred
@@ -554,19 +565,22 @@ def drop_object(name: str, object_type: str) -> None:
554565

555566
try:
556567
if overwrite and (not auto_create_table):
557-
truncate_sql = f"TRUNCATE TABLE identifier('{target_table_location}') /* Python:snowflake.connector.pandas_tools.write_pandas() */"
558-
logger.debug(f"truncating table with '{truncate_sql}'")
568+
truncate_sql = "TRUNCATE TABLE identifier(?) /* Python:snowflake.connector.pandas_tools.write_pandas() */"
569+
params = (target_table_location,)
570+
logger.debug(f"truncating table with '{truncate_sql}'. params: %s", params)
559571
cursor.execute(
560572
truncate_sql,
561573
_is_internal=True,
562574
_force_qmark_paramstyle=True,
575+
params=params,
563576
num_statements=1,
564577
)
565578

579+
copy_stage_location = "@" + stage_location.replace("'", "\\'")
566580
copy_into_sql = (
567-
f"COPY INTO identifier('{target_table_location}') /* Python:snowflake.connector.pandas_tools.write_pandas() */ "
581+
f"COPY INTO identifier(?) /* Python:snowflake.connector.pandas_tools.write_pandas() */ "
568582
f"({columns}) "
569-
f"FROM (SELECT {parquet_columns} FROM '@{stage_location}') "
583+
f"FROM (SELECT {parquet_columns} FROM '{copy_stage_location}') "
570584
f"FILE_FORMAT=("
571585
f"TYPE=PARQUET "
572586
f"COMPRESSION={compression_map[compression]}"
@@ -575,7 +589,10 @@ def drop_object(name: str, object_type: str) -> None:
575589
f") "
576590
f"PURGE=TRUE ON_ERROR=?"
577591
)
578-
params = (on_error,)
592+
params = (
593+
target_table_location,
594+
on_error,
595+
)
579596
logger.debug(f"copying into with '{copy_into_sql}'. params: %s", params)
580597
copy_results = cursor.execute(
581598
copy_into_sql,
@@ -593,12 +610,14 @@ def drop_object(name: str, object_type: str) -> None:
593610
quote_identifiers=quote_identifiers,
594611
)
595612
drop_object(original_table_location, "table")
596-
rename_table_sql = f"ALTER TABLE identifier('{target_table_location}') RENAME TO identifier('{original_table_location}') /* Python:snowflake.connector.pandas_tools.write_pandas() */"
597-
logger.debug(f"rename table with '{rename_table_sql}'.")
613+
rename_table_sql = "ALTER TABLE identifier(?) RENAME TO identifier(?) /* Python:snowflake.connector.pandas_tools.write_pandas() */"
614+
params = (target_table_location, original_table_location)
615+
logger.debug(f"rename table with '{rename_table_sql}'. params: %s", params)
598616
cursor.execute(
599617
rename_table_sql,
600618
_is_internal=True,
601619
_force_qmark_paramstyle=True,
620+
params=params,
602621
num_statements=1,
603622
)
604623
except ProgrammingError:

test/integ/pandas/test_pandas_tools.py

Lines changed: 6 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -535,7 +535,7 @@ def test_table_location_building(
535535

536536
def mocked_execute(*args, **kwargs):
537537
if len(args) >= 1 and args[0].startswith("COPY INTO"):
538-
assert expected_location in args[0]
538+
assert kwargs["params"][0] == expected_location
539539
cur = SnowflakeCursor(cnx)
540540
cur._result = iter([])
541541
return cur
@@ -566,8 +566,8 @@ def mocked_execute(*args, **kwargs):
566566
(None, "schema", False, "schema"),
567567
(None, None, True, ""),
568568
(None, None, False, ""),
569-
("data'base", "schema", True, '"data\\\'base"."schema"'),
570-
("data'base", "schema", False, '"data\\\'base".schema'),
569+
("data'base", "schema", True, '"data\'base"."schema"'),
570+
("data'base", "schema", False, '"data\'base".schema'),
571571
],
572572
)
573573
def test_stage_location_building(
@@ -583,11 +583,9 @@ def test_stage_location_building(
583583
with conn_cnx() as cnx:
584584

585585
def mocked_execute(*args, **kwargs):
586-
if len(args) >= 1 and args[0].lower().startswith("create temp stage"):
587-
location_identifier = re.search(
588-
r"identifier\(\'(.*?)\)", args[0]
589-
).group(1)
590-
assert location_identifier.startswith(expected_db_schema)
586+
if len(args) >= 1 and args[0].startswith("create temporary stage"):
587+
db_schema = ".".join(args[0].split(" ")[-1].split(".")[:-1])
588+
assert db_schema == expected_db_schema
591589
cur = SnowflakeCursor(cnx)
592590
cur._result = iter([])
593591
return cur

0 commit comments

Comments
 (0)