
Commit 631eec6

SNOW-2268606-regression-3.17.0-unexplained-errors-in-connecting-to-IMDS-from-3.17.0-leads-to-connection-failures-and-excessive-logging-works-with-3.16.0 (#2489)
1 parent 7367d39 commit 631eec6

File tree

6 files changed: +328 -39 lines changed

DESCRIPTION.md

Lines changed: 3 additions & 0 deletions
@@ -7,6 +7,9 @@ https://docs.snowflake.com/
 Source code is also available at: https://github.com/snowflakedb/snowflake-connector-python
 
 # Release Notes
+- v3.17.2(TBD)
+  - Fixed a bug where platform_detection was retrying failed requests with warnings to non-existent endpoints.
+
 - v3.17.1(August 17,2025)
   - Added `infer_schema` parameter to `write_pandas` to perform schema inference on the passed data.
   - Namespace `snowlake` reverted back to non-module.

src/snowflake/connector/auth/_auth.py

Lines changed: 1 addition & 1 deletion
@@ -128,7 +128,7 @@ def base_auth_data(
             "SOCKET_TIMEOUT": socket_timeout,
             "PLATFORM": detect_platforms(
                 platform_detection_timeout_seconds=platform_detection_timeout_seconds,
-                session_manager=session_manager,
+                session_manager=session_manager.clone(max_retries=0),
             ),
         },
     },
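The practical effect of passing `session_manager.clone(max_retries=0)` is that platform-detection probes fail on the first connection error instead of being retried by urllib3, which is what produced the warning spam against non-existent IMDS endpoints. A minimal standalone sketch of that fail-fast behavior, using plain `requests` rather than the connector's vendored copy; the metadata URL and timeout are only illustrative:

# Minimal sketch (not connector code): an adapter created with
# max_retries=0 makes a probe against an unreachable metadata-style
# endpoint raise once, instead of triggering urllib3 retry warnings.
import requests
from requests.adapters import HTTPAdapter

session = requests.Session()
session.mount("http://", HTTPAdapter(max_retries=0))
session.mount("https://", HTTPAdapter(max_retries=0))

try:
    # 169.254.169.254 is the conventional link-local metadata address;
    # outside a cloud VM it is simply unreachable, which is the case
    # this commit targets.
    session.get("http://169.254.169.254/latest/meta-data/", timeout=1)
except requests.RequestException as exc:
    print(f"platform probe failed fast: {type(exc).__name__}")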

src/snowflake/connector/platform_detection.py

Lines changed: 1 addition & 1 deletion
@@ -399,7 +399,7 @@ def detect_platforms(
 
     if session_manager is None:
         # This should never happen - we expect session manager to be passed from the outer scope
-        session_manager = SessionManager(use_pooling=False)
+        session_manager = SessionManager(use_pooling=False, max_retries=0)
 
     # Run environment-only checks synchronously (no network calls, no threading overhead)
     platforms = {
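Both this fallback and the `clone(max_retries=0)` call in `_auth.py` rely on `max_retries` being forwarded to the HTTP adapter; the `HttpConfig` change below additionally widens the field to accept a full urllib3 `Retry` policy. A rough illustration of the two accepted shapes, using stock requests/urllib3 rather than the connector's vendored modules:

# Illustration only: HTTPAdapter accepts either an int retry count or a
# urllib3 Retry object, mirroring the int | Retry | None annotation below.
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

fail_fast = HTTPAdapter(max_retries=0)  # single attempt, no retry warnings

tuned = HTTPAdapter(
    max_retries=Retry(
        total=3,                            # at most three retries
        backoff_factor=0.5,                 # 0.5s, 1s, 2s between attempts
        status_forcelist=[502, 503, 504],   # retry only on these statuses
    )
)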

src/snowflake/connector/session_manager.py

Lines changed: 59 additions & 36 deletions
@@ -16,7 +16,7 @@
 from .vendored.requests.adapters import BaseAdapter, HTTPAdapter
 from .vendored.requests.exceptions import InvalidProxyURL
 from .vendored.requests.utils import prepend_scheme_if_needed, select_proxy
-from .vendored.urllib3 import PoolManager
+from .vendored.urllib3 import PoolManager, Retry
 from .vendored.urllib3.poolmanager import ProxyManager
 from .vendored.urllib3.util.url import parse_url
 
@@ -119,7 +119,7 @@ class HttpConfig:
         default_factory=ProxySupportAdapterFactory
     )
     use_pooling: bool = True
-    max_retries: int | None = REQUESTS_RETRY
+    max_retries: int | Retry | None = REQUESTS_RETRY
     proxy_host: str | None = None
     proxy_port: str | None = None
     proxy_user: str | None = None
@@ -129,6 +129,22 @@ def copy_with(self, **overrides: Any) -> HttpConfig:
         """Return a new HttpConfig with overrides applied."""
         return replace(self, **overrides)
 
+    def get_adapter(self, **override_adapter_factory_kwargs) -> HTTPAdapter:
+        # We pass here only chosen attributes as kwargs to make the arguments received by the factory as compliant with the HttpAdapter constructor interface as possible.
+        # We could consider passing the whole HttpConfig as kwarg to the factory if necessary in the future.
+        attributes_for_adapter_factory = frozenset(
+            {
+                "max_retries",
+            }
+        )
+
+        self_kwargs_for_adapter_factory = {
+            attr_name: getattr(self, attr_name)
+            for attr_name in attributes_for_adapter_factory
+        }
+        self_kwargs_for_adapter_factory.update(override_adapter_factory_kwargs)
+        return self.adapter_factory(**self_kwargs_for_adapter_factory)
+
 
 class SessionPool:
     """
@@ -185,6 +201,40 @@ def close(self) -> None:
         self._idle_sessions.clear()
 
 
+class _ConfigDirectAccessMixin(abc.ABC):
+    @property
+    @abc.abstractmethod
+    def config(self) -> HttpConfig: ...
+
+    @config.setter
+    @abc.abstractmethod
+    def config(self, value) -> HttpConfig: ...
+
+    @property
+    def use_pooling(self) -> bool:
+        return self.config.use_pooling
+
+    @use_pooling.setter
+    def use_pooling(self, value: bool) -> None:
+        self.config = self.config.copy_with(use_pooling=value)
+
+    @property
+    def adapter_factory(self) -> Callable[..., HTTPAdapter]:
+        return self.config.adapter_factory
+
+    @adapter_factory.setter
+    def adapter_factory(self, value: Callable[..., HTTPAdapter]) -> None:
+        self.config = self.config.copy_with(adapter_factory=value)
+
+    @property
+    def max_retries(self) -> Retry | int:
+        return self.config.max_retries
+
+    @max_retries.setter
+    def max_retries(self, value: Retry | int) -> None:
+        self.config = self.config.copy_with(max_retries=value)
+
+
 class _RequestVerbsUsingSessionMixin(abc.ABC):
     """
     Mixin that provides HTTP methods (get, post, put, etc.) mirroring requests.Session, maintaining their default argument behavior (e.g., HEAD uses allow_redirects=False).
@@ -295,7 +345,7 @@ def delete(
         return session.delete(url, headers=headers, timeout=timeout, **kwargs)
 
 
-class SessionManager(_RequestVerbsUsingSessionMixin):
+class SessionManager(_RequestVerbsUsingSessionMixin, _ConfigDirectAccessMixin):
     """
     Central HTTP session manager that handles all external requests from the Snowflake driver.
 
@@ -362,22 +412,6 @@ def proxy_url(self) -> str:
             self._cfg.proxy_password,
         )
 
-    @property
-    def use_pooling(self) -> bool:
-        return self._cfg.use_pooling
-
-    @use_pooling.setter
-    def use_pooling(self, value: bool) -> None:
-        self._cfg = self._cfg.copy_with(use_pooling=value)
-
-    @property
-    def adapter_factory(self) -> Callable[..., HTTPAdapter]:
-        return self._cfg.adapter_factory
-
-    @adapter_factory.setter
-    def adapter_factory(self, value: Callable[..., HTTPAdapter]) -> None:
-        self._cfg = self._cfg.copy_with(adapter_factory=value)
-
     @property
     def sessions_map(self) -> dict[str, SessionPool]:
         return self._sessions_map
@@ -403,9 +437,7 @@ def get_session_pool_manager(session: Session, url: str) -> PoolManager | None:
     def _mount_adapters(self, session: requests.Session) -> None:
        try:
             # Its important that each separate session manager creates its own adapters - because they are storing internally PoolManagers - which shouldn't be reused if not in scope of the same adapter.
-            adapter = self._cfg.adapter_factory(
-                max_retries=self._cfg.max_retries or REQUESTS_RETRY
-            )
+            adapter = self._cfg.get_adapter()
             if adapter is not None:
                 session.mount("http://", adapter)
                 session.mount("https://", adapter)
@@ -473,27 +505,18 @@ def close(self):
 
     def clone(
         self,
-        *,
-        use_pooling: bool | None = None,
-        adapter_factory: AdapterFactory | None = None,
+        **http_config_overrides,
     ) -> SessionManager:
         """Return a new *stateless* SessionManager sharing this instance’s config.
 
-        "Shallow" means the configuration object (HttpConfig) is reused as-is,
+        "Shallow clone" - the configuration object (HttpConfig) is reused as-is,
         while *stateful* aspects such as the per-host SessionPool mapping are
         reset, so the two managers do not share live `requests.Session`
         objects.
-        Optional *use_pooling* / *adapter_factory* overrides create a modified
-        copy of the config before instantiation.
+        Optional kwargs (e.g. *use_pooling* / *adapter_factory* / max_retries etc.) - overrides to create a modified
+        copy of the HttpConfig before instantiation.
         """
-
-        overrides: dict[str, Any] = {}
-        if use_pooling is not None:
-            overrides["use_pooling"] = use_pooling
-        if adapter_factory is not None:
-            overrides["adapter_factory"] = adapter_factory
-
-        return SessionManager.from_config(self._cfg, **overrides)
+        return SessionManager.from_config(self._cfg, **http_config_overrides)
 
     def __getstate__(self):
         state = self.__dict__.copy()
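Taken together, the `HttpConfig.get_adapter` helper, the `_ConfigDirectAccessMixin` properties, and the generalized `clone(**http_config_overrides)` implement one pattern: an immutable config that can be copied with overrides, wrapped by a manager whose clones share configuration but never live session state. A simplified, self-contained model of that pattern (the names here are stand-ins, not the connector's classes):

# Simplified standalone model of the clone pattern above; _Config and
# _Manager are illustrative stand-ins for HttpConfig and SessionManager.
from __future__ import annotations

from dataclasses import dataclass, replace
from typing import Any


@dataclass(frozen=True)
class _Config:
    use_pooling: bool = True
    max_retries: int | None = 7  # stand-in for REQUESTS_RETRY

    def copy_with(self, **overrides: Any) -> _Config:
        return replace(self, **overrides)


class _Manager:
    def __init__(self, cfg: _Config) -> None:
        self._cfg = cfg
        self._pools: dict[str, Any] = {}  # stateful part, never shared

    def clone(self, **overrides: Any) -> _Manager:
        # Any config field can be overridden at clone time; state is reset.
        return _Manager(self._cfg.copy_with(**overrides))


base = _Manager(_Config())
probe = base.clone(max_retries=0)       # retry-free copy for detection
assert probe._cfg.max_retries == 0
assert base._cfg.max_retries == 7       # original config untouched
assert probe._pools is not base._pools  # no shared per-host session state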

test/integ/test_connection.py

Lines changed: 172 additions & 0 deletions
@@ -60,6 +60,8 @@
 except ImportError:
     pass
 
+logger = logging.getLogger(__name__)
+
 
 def test_basic(conn_testaccount):
     """Basic Connection test."""
@@ -1347,6 +1349,176 @@ def test_ocsp_mode_insecure_mode_and_disable_ocsp_checks_mismatch_ocsp_disabled(
     assert "This connection does not perform OCSP checks." in caplog.text
 
 
+def _message_matches_pattern(message, pattern):
+    """Check if a log message matches a pattern (exact match or starts with pattern)."""
+    return message == pattern or message.startswith(pattern)
+
+
+def _find_matching_patterns(messages, patterns):
+    """Find which patterns match the given messages.
+
+    Returns:
+        tuple: (matched_patterns, missing_patterns, unmatched_messages)
+    """
+    matched_patterns = set()
+    unmatched_messages = []
+
+    for message in messages:
+        found_match = False
+        for pattern in patterns:
+            if _message_matches_pattern(message, pattern):
+                matched_patterns.add(pattern)
+                found_match = True
+                break
+        if not found_match:
+            unmatched_messages.append(message)
+
+    missing_patterns = set(patterns) - matched_patterns
+    return matched_patterns, missing_patterns, unmatched_messages
+
+
+def _calculate_log_bytes(messages):
+    """Calculate total byte size of log messages."""
+    return sum(len(message.encode("utf-8")) for message in messages)
+
+
+def _log_pattern_analysis(
+    actual_messages,
+    expected_patterns,
+    matched_patterns,
+    missing_patterns,
+    unmatched_messages,
+    show_all_messages=False,
+):
+    """Log detailed analysis of pattern differences.
+
+    Args:
+        actual_messages: List of actual log messages
+        expected_patterns: List of expected log patterns
+        matched_patterns: Set of patterns that were found
+        missing_patterns: Set of patterns that were not found
+        unmatched_messages: List of messages that didn't match any pattern
+        show_all_messages: If True, log all actual messages for debugging
+    """
+
+    if missing_patterns:
+        logger.warning(f"Missing expected log patterns ({len(missing_patterns)}):")
+        for pattern in sorted(missing_patterns):
+            logger.warning(f"  - MISSING: '{pattern}'")
+
+    if unmatched_messages:
+        logger.warning(f"New/unexpected log messages ({len(unmatched_messages)}):")
+        for message in unmatched_messages:
+            message_bytes = len(message.encode("utf-8"))
+            logger.warning(f"  + NEW: '{message}' ({message_bytes} bytes)")
+
+    # Log summary
+    logger.warning("Log analysis summary:")
+    logger.warning(f"  - Expected patterns: {len(expected_patterns)}")
+    logger.warning(f"  - Matched patterns: {len(matched_patterns)}")
+    logger.warning(f"  - Missing patterns: {len(missing_patterns)}")
+    logger.warning(f"  - Actual messages: {len(actual_messages)}")
+    logger.warning(f"  - Unmatched messages: {len(unmatched_messages)}")
+
+    # Show all messages if requested (useful when patterns match but bytes don't)
+    if show_all_messages:
+        logger.warning("All actual log messages:")
+        for i, message in enumerate(actual_messages):
+            message_bytes = len(message.encode("utf-8"))
+            logger.warning(f"  [{i:2d}] '{message}' ({message_bytes} bytes)")
+
+
+def _assert_log_bytes_within_tolerance(actual_bytes, expected_bytes, tolerance):
+    """Assert that log bytes are within acceptable tolerance."""
+    assert actual_bytes == pytest.approx(expected_bytes, rel=tolerance), (
+        f"Log bytes {actual_bytes} is not approximately equal to expected {expected_bytes} "
+        f"within {tolerance*100}% tolerance. "
+        f"This may indicate unwanted logs being produced or changes in logging behavior."
+    )
+
+
+@pytest.mark.skipolddriver
+def test_logs_size_during_basic_query_stays_unchanged(conn_cnx, caplog):
+    """Test that the amount of bytes logged during normal select 1 flow is within acceptable range. Related to: SNOW-2268606"""
+    caplog.set_level(logging.INFO, "snowflake.connector")
+    caplog.clear()
+
+    # Test-specific constants
+    EXPECTED_BYTES = 145
+    ACCEPTABLE_DELTA = 0.6
+    EXPECTED_PATTERNS = [
+        "Snowflake Connector for Python Version: ",  # followed by version info
+        "Connecting to GLOBAL Snowflake domain",
+    ]
+
+    with conn_cnx() as conn:
+        with conn.cursor() as cur:
+            cur.execute("select 1").fetchall()
+
+    actual_messages = [record.getMessage() for record in caplog.records]
+    total_log_bytes = _calculate_log_bytes(actual_messages)
+
+    if total_log_bytes != EXPECTED_BYTES:
+        logger.warning(
+            f"There was a change in a size of the logs produced by the basic Snowflake query. "
+            f"Expected: {EXPECTED_BYTES}, got: {total_log_bytes}. "
+            f"We may need to update the test_logs_size_during_basic_query_stays_unchanged - i.e. EXACT_EXPECTED_LOGS_BYTES constant."
+        )
+
+        # Check if patterns match to decide whether to show all messages
+        matched_patterns, missing_patterns, unmatched_messages = (
+            _find_matching_patterns(actual_messages, EXPECTED_PATTERNS)
+        )
+        patterns_match_perfectly = (
+            len(missing_patterns) == 0 and len(unmatched_messages) == 0
+        )
+
+        _log_pattern_analysis(
+            actual_messages,
+            EXPECTED_PATTERNS,
+            matched_patterns,
+            missing_patterns,
+            unmatched_messages,
+            show_all_messages=patterns_match_perfectly,
+        )
+
+    _assert_log_bytes_within_tolerance(
+        total_log_bytes, EXPECTED_BYTES, ACCEPTABLE_DELTA
+    )
+
+
+@pytest.mark.skipolddriver
+def test_no_new_warnings_or_errors_on_successful_basic_select(conn_cnx, caplog):
+    """Test that the number of warning/error log entries stays the same during successful basic select operations. Related to: SNOW-2268606"""
+    caplog.set_level(logging.WARNING, "snowflake.connector")
+    baseline_warning_count = 0
+    baseline_error_count = 0
+
+    # Execute basic select operations and check counts remain the same
+    caplog.clear()
+    with conn_cnx() as conn:
+        with conn.cursor() as cur:
+            # Execute basic select operations
+            result1 = cur.execute("select 1").fetchall()
+            assert result1 == [(1,)]
+
+    # Count warning/error log entries after operations
+    test_warning_count = len(
+        [r for r in caplog.records if r.levelno >= logging.WARNING]
+    )
+    test_error_count = len([r for r in caplog.records if r.levelno >= logging.ERROR])
+
+    # Assert counts stay the same (no new warnings or errors)
+    assert test_warning_count == baseline_warning_count, (
+        f"Warning count increased from {baseline_warning_count} to {test_warning_count}. "
+        f"New warnings: {[r.getMessage() for r in caplog.records if r.levelno == logging.WARNING]}"
+    )
+    assert test_error_count == baseline_error_count, (
+        f"Error count increased from {baseline_error_count} to {test_error_count}. "
+        f"New errors: {[r.getMessage() for r in caplog.records if r.levelno >= logging.ERROR]}"
+    )
+
+
 @pytest.mark.skipolddriver
 def test_ocsp_mode_insecure_mode_and_disable_ocsp_checks_mismatch_ocsp_enabled(
     conn_cnx, is_public_test, is_local_dev_setup, caplog
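The byte-size guard in `test_logs_size_during_basic_query_stays_unchanged` ultimately reduces to `pytest.approx` with a relative tolerance: 145 expected bytes with `rel=0.6` accepts roughly 58 to 232 bytes. A tiny illustration of that arithmetic outside the test module:

# pytest.approx(expected, rel=r) accepts values within r * expected of the
# expected value, so 145 bytes with rel=0.6 tolerates roughly 58-232 bytes.
import pytest

EXPECTED_BYTES = 145
ACCEPTABLE_DELTA = 0.6

assert 100 == pytest.approx(EXPECTED_BYTES, rel=ACCEPTABLE_DELTA)  # within 87 bytes of 145
assert 230 == pytest.approx(EXPECTED_BYTES, rel=ACCEPTABLE_DELTA)  # still within tolerance
assert 300 != pytest.approx(EXPECTED_BYTES, rel=ACCEPTABLE_DELTA)  # ~2x expected, rejected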
