Skip to content

Commit 755a9b1

Browse files
self-review
1 parent fa1addc commit 755a9b1

File tree

2 files changed

+3
-86
lines changed

2 files changed

+3
-86
lines changed

src/snowflake/connector/nanoarrow_cpp/ArrowIterator/CArrowTableIterator.cpp

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -657,7 +657,7 @@ void CArrowTableIterator::convertTimestampColumn_nanoarrow(
657657
ArrowArrayView* epochArray = nullptr;
658658
ArrowArrayView* fractionArray = nullptr;
659659
bool has_overflow_to_downscale = false;
660-
if (field->type == NANOARROW_TYPE_STRUCT) {
660+
if (scale > 6 && field->type == NANOARROW_TYPE_STRUCT) {
661661
for (int64_t i = 0; i < field->schema->n_children; i++) {
662662
ArrowSchema* c_schema = field->schema->children[i];
663663
if (std::strcmp(c_schema->name, internal::FIELD_NAME_EPOCH.c_str()) ==
@@ -670,10 +670,8 @@ void CArrowTableIterator::convertTimestampColumn_nanoarrow(
670670
// do nothing
671671
}
672672
}
673-
if (scale > 6) {
674-
has_overflow_to_downscale = _checkNanosecondTimestampOverflowAndDownscale(
675-
columnArray, epochArray, fractionArray);
676-
}
673+
has_overflow_to_downscale = _checkNanosecondTimestampOverflowAndDownscale(
674+
columnArray, epochArray, fractionArray);
677675
}
678676

679677
if (scale <= 6) {

test/integ/pandas_it/test_arrow_pandas.py

Lines changed: 0 additions & 81 deletions
Original file line numberDiff line numberDiff line change
@@ -1518,84 +1518,3 @@ def test_fetch_with_pandas_nullable_types(conn_cnx):
15181518
df = cursor_table.fetch_pandas_all(types_mapper=dtype_mapping.get)
15191519
pandas._testing.assert_series_equal(df.dtypes, expected_dtypes)
15201520
assert df.to_string() == expected_df_to_string
1521-
1522-
1523-
# @pytest.mark.parametrize(
1524-
# "timestamp_type", ["timestamp_ntz", "timestamp_ltz", "timestamp_tz"]
1525-
# )
1526-
# def test_convert_timestamp_overflow(conn_cnx, timestamp_type):
1527-
# """Test whether large timestamps are correctly falling back to microsecond precision."""
1528-
1529-
# def query(timestamp):
1530-
# if timestamp_type == "timestamp_tz":
1531-
# return f"SELECT CONVERT_TIMEZONE ('UTC', '{timestamp}') AS result"
1532-
# return f"SELECT '{timestamp}'::{timestamp_type} AS result"
1533-
1534-
# with conn_cnx() as cnx:
1535-
# cur = cnx.cursor()
1536-
1537-
# # Check that "large" dates are correctly falling back to microsecond precision
1538-
# cur.execute(query("2999-12-31 00:00:00.001234"))
1539-
# result = cur.fetchall()
1540-
# assert str(result[0][0]).startswith("2999-12-31 00:00:00.001234")
1541-
# result_pandas = cur.fetch_pandas_all()
1542-
# assert str(result_pandas.iloc[0, 0]).startswith("2999-12-31 00:00:00.001234")
1543-
1544-
# # Check that nanosecond precision is used for dates within the nanosecond range
1545-
# cur.execute(query("2000-12-31 00:00:00.001234567"))
1546-
# result_pandas = cur.fetch_arrow_all()
1547-
# result_pandas = cur.fetch_pandas_all()
1548-
# assert str(result_pandas.iloc[0, 0]).startswith("2000-12-31 00:00:00.001234567")
1549-
1550-
# # Check that nanosecond precision used outside of nanosecond range throws an error
1551-
# cur.execute(query("2999-12-31 00:00:00.0012345678"))
1552-
# with pytest.raises(
1553-
# OverflowError,
1554-
# match=(
1555-
# "If you use a timestamp with the nanosecond part over 6-digits in the Snowflake database, "
1556-
# "the timestamp must be between '1677-09-21 00:12:43.145224192' and "
1557-
# "'2262-04-11 23:47:16.854775807' to not overflow."
1558-
# ),
1559-
# ):
1560-
# cur.fetch_pandas_all()
1561-
1562-
1563-
# def test_timestamp_ltz_overflow(conn_cnx):
1564-
# """Test TIMESTAMP_LTZ with microsecond fallback for year 2999.
1565-
1566-
# This test verifies that TIMESTAMP_LTZ dates beyond the nanosecond range automatically
1567-
# fall back to microsecond precision instead of failing.
1568-
# """
1569-
# with conn_cnx() as cnx:
1570-
# cur = cnx.cursor()
1571-
# cur.execute(SQL_ENABLE_ARROW)
1572-
1573-
# # Test with regular fetchone first - this should work fine
1574-
# result = cur.execute(
1575-
# "SELECT '2999-12-31 00:00:00.000'::timestamp_ltz AS result1"
1576-
# ).fetchone()
1577-
# # TIMESTAMP_LTZ will be converted to session timezone (UTC by default in tests)
1578-
# assert str(result[0]) == "2999-12-31 00:00:00+00:00"
1579-
1580-
# # Test with fetch_pandas_all - this should now work with microsecond fallback
1581-
# # instead of throwing an error or returning wrong data
1582-
# pandas_result = cur.execute(
1583-
# "SELECT '2999-12-31 00:00:00.000'::timestamp_ltz AS result1"
1584-
# ).fetch_pandas_all()
1585-
1586-
# # Check that we got a DataFrame with one row and one column
1587-
# assert pandas_result.shape == (1, 1)
1588-
# assert pandas_result.columns[0] == "RESULT1"
1589-
1590-
# # Check the actual timestamp value - should be correct year 2999
1591-
# timestamp_value = pandas_result.iloc[0, 0]
1592-
# assert str(timestamp_value) == "2999-12-31 00:00:00+00:00"
1593-
1594-
# # Test with a date within the nanosecond range (should use nanoseconds)
1595-
# pandas_result_2200 = cur.execute(
1596-
# "SELECT '2200-12-31 00:00:00.000'::timestamp_ltz AS result1"
1597-
# ).fetch_pandas_all()
1598-
1599-
# # Check that the date is correct
1600-
# timestamp_value_2200 = pandas_result_2200.iloc[0, 0]
1601-
# assert str(timestamp_value_2200) == "2200-12-31 00:00:00+00:00"

0 commit comments

Comments (0)