Skip to content

Commit 473811d

Browse files
SNOW-2268606-regression-3.17.0-unexplained-errors-in-connecting-to-IMDS-from-3.17.0-leads-to-connection-failures-and-excessive-logging-works-with-3.16.0 (#2489)
(cherry picked from commit 631eec6)
1 parent fc4e772 commit 473811d

File tree

5 files changed

+309
-39
lines changed

5 files changed

+309
-39
lines changed

src/snowflake/connector/auth/_auth.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ def base_auth_data(
138138
"SOCKET_TIMEOUT": socket_timeout,
139139
"PLATFORM": detect_platforms(
140140
platform_detection_timeout_seconds=platform_detection_timeout_seconds,
141-
session_manager=session_manager,
141+
session_manager=session_manager.clone(max_retries=0),
142142
),
143143
},
144144
},

src/snowflake/connector/platform_detection.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -405,7 +405,7 @@ def detect_platforms(
405405
logger.debug(
406406
"No session manager provided. HTTP settings may not be preserved. Using default."
407407
)
408-
session_manager = SessionManager(use_pooling=False)
408+
session_manager = SessionManager(use_pooling=False, max_retries=0)
409409

410410
# Run environment-only checks synchronously (no network calls, no threading overhead)
411411
platforms = {

src/snowflake/connector/session_manager.py

Lines changed: 43 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
from .vendored.requests.adapters import BaseAdapter, HTTPAdapter
1717
from .vendored.requests.exceptions import InvalidProxyURL
1818
from .vendored.requests.utils import prepend_scheme_if_needed, select_proxy
19-
from .vendored.urllib3 import PoolManager
19+
from .vendored.urllib3 import PoolManager, Retry
2020
from .vendored.urllib3.poolmanager import ProxyManager
2121
from .vendored.urllib3.util.url import parse_url
2222

@@ -119,7 +119,7 @@ class BaseHttpConfig:
119119
"""Immutable HTTP configuration shared by SessionManager instances."""
120120

121121
use_pooling: bool = True
122-
max_retries: int | None = REQUESTS_RETRY
122+
max_retries: int | Retry | None = REQUESTS_RETRY
123123
proxy_host: str | None = None
124124
proxy_port: str | None = None
125125
proxy_user: str | None = None
@@ -217,6 +217,40 @@ def close(self) -> None:
217217
self._idle_sessions.clear()
218218

219219

220+
class _ConfigDirectAccessMixin(abc.ABC):
221+
@property
222+
@abc.abstractmethod
223+
def config(self) -> HttpConfig: ...
224+
225+
@config.setter
226+
@abc.abstractmethod
227+
def config(self, value) -> HttpConfig: ...
228+
229+
@property
230+
def use_pooling(self) -> bool:
231+
return self.config.use_pooling
232+
233+
@use_pooling.setter
234+
def use_pooling(self, value: bool) -> None:
235+
self.config = self.config.copy_with(use_pooling=value)
236+
237+
@property
238+
def adapter_factory(self) -> Callable[..., HTTPAdapter]:
239+
return self.config.adapter_factory
240+
241+
@adapter_factory.setter
242+
def adapter_factory(self, value: Callable[..., HTTPAdapter]) -> None:
243+
self.config = self.config.copy_with(adapter_factory=value)
244+
245+
@property
246+
def max_retries(self) -> Retry | int:
247+
return self.config.max_retries
248+
249+
@max_retries.setter
250+
def max_retries(self, value: Retry | int) -> None:
251+
self.config = self.config.copy_with(max_retries=value)
252+
253+
220254
class _RequestVerbsUsingSessionMixin(abc.ABC):
221255
"""
222256
Mixin that provides HTTP methods (get, post, put, etc.) mirroring requests.Session, maintaining their default argument behavior (e.g., HEAD uses allow_redirects=False).
@@ -327,7 +361,7 @@ def delete(
327361
return session.delete(url, headers=headers, timeout=timeout, **kwargs)
328362

329363

330-
class SessionManager(_RequestVerbsUsingSessionMixin):
364+
class SessionManager(_RequestVerbsUsingSessionMixin, _ConfigDirectAccessMixin):
331365
"""
332366
Central HTTP session manager that handles all external requests from the Snowflake driver.
333367
@@ -394,22 +428,6 @@ def proxy_url(self) -> str:
394428
self._cfg.proxy_password,
395429
)
396430

397-
@property
398-
def use_pooling(self) -> bool:
399-
return self._cfg.use_pooling
400-
401-
@use_pooling.setter
402-
def use_pooling(self, value: bool) -> None:
403-
self._cfg = self._cfg.copy_with(use_pooling=value)
404-
405-
@property
406-
def adapter_factory(self) -> Callable[..., HTTPAdapter]:
407-
return self._cfg.adapter_factory
408-
409-
@adapter_factory.setter
410-
def adapter_factory(self, value: Callable[..., HTTPAdapter]) -> None:
411-
self._cfg = self._cfg.copy_with(adapter_factory=value)
412-
413431
@property
414432
def sessions_map(self) -> dict[str, SessionPool]:
415433
return self._sessions_map
@@ -435,9 +453,7 @@ def get_session_pool_manager(session: Session, url: str) -> PoolManager | None:
435453
def _mount_adapters(self, session: requests.Session) -> None:
436454
try:
437455
# Its important that each separate session manager creates its own adapters - because they are storing internally PoolManagers - which shouldn't be reused if not in scope of the same adapter.
438-
adapter = self._cfg.adapter_factory(
439-
max_retries=self._cfg.max_retries or REQUESTS_RETRY
440-
)
456+
adapter = self._cfg.get_adapter()
441457
if adapter is not None:
442458
session.mount("http://", adapter)
443459
session.mount("https://", adapter)
@@ -505,27 +521,18 @@ def close(self):
505521

506522
def clone(
507523
self,
508-
*,
509-
use_pooling: bool | None = None,
510-
adapter_factory: AdapterFactory | None = None,
524+
**http_config_overrides,
511525
) -> SessionManager:
512526
"""Return a new *stateless* SessionManager sharing this instance’s config.
513527
514-
"Shallow" means the configuration object (HttpConfig) is reused as-is,
528+
"Shallow clone" - the configuration object (HttpConfig) is reused as-is,
515529
while *stateful* aspects such as the per-host SessionPool mapping are
516530
reset, so the two managers do not share live `requests.Session`
517531
objects.
518-
Optional *use_pooling* / *adapter_factory* overrides create a modified
519-
copy of the config before instantiation.
532+
Optional kwargs (e.g. *use_pooling* / *adapter_factory* / max_retries etc.) - overrides to create a modified
533+
copy of the HttpConfig before instantiation.
520534
"""
521-
522-
overrides: dict[str, Any] = {}
523-
if use_pooling is not None:
524-
overrides["use_pooling"] = use_pooling
525-
if adapter_factory is not None:
526-
overrides["adapter_factory"] = adapter_factory
527-
528-
return SessionManager.from_config(self._cfg, **overrides)
535+
return SessionManager.from_config(self._cfg, **http_config_overrides)
529536

530537
def __getstate__(self):
531538
state = self.__dict__.copy()

test/integ/test_connection.py

Lines changed: 172 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,8 @@
6060
except ImportError:
6161
pass
6262

63+
logger = logging.getLogger(__name__)
64+
6365

6466
def test_basic(conn_testaccount):
6567
"""Basic Connection test."""
@@ -1388,6 +1390,176 @@ def test_ocsp_mode_insecure_mode_and_disable_ocsp_checks_mismatch_ocsp_disabled(
13881390
assert "This connection does not perform OCSP checks." in caplog.text
13891391

13901392

1393+
def _message_matches_pattern(message, pattern):
1394+
"""Check if a log message matches a pattern (exact match or starts with pattern)."""
1395+
return message == pattern or message.startswith(pattern)
1396+
1397+
1398+
def _find_matching_patterns(messages, patterns):
1399+
"""Find which patterns match the given messages.
1400+
1401+
Returns:
1402+
tuple: (matched_patterns, missing_patterns, unmatched_messages)
1403+
"""
1404+
matched_patterns = set()
1405+
unmatched_messages = []
1406+
1407+
for message in messages:
1408+
found_match = False
1409+
for pattern in patterns:
1410+
if _message_matches_pattern(message, pattern):
1411+
matched_patterns.add(pattern)
1412+
found_match = True
1413+
break
1414+
if not found_match:
1415+
unmatched_messages.append(message)
1416+
1417+
missing_patterns = set(patterns) - matched_patterns
1418+
return matched_patterns, missing_patterns, unmatched_messages
1419+
1420+
1421+
def _calculate_log_bytes(messages):
1422+
"""Calculate total byte size of log messages."""
1423+
return sum(len(message.encode("utf-8")) for message in messages)
1424+
1425+
1426+
def _log_pattern_analysis(
    actual_messages,
    expected_patterns,
    matched_patterns,
    missing_patterns,
    unmatched_messages,
    show_all_messages=False,
):
    """Log detailed analysis of pattern differences.

    Args:
        actual_messages: List of actual log messages
        expected_patterns: List of expected log patterns
        matched_patterns: Set of patterns that were found
        missing_patterns: Set of patterns that were not found
        unmatched_messages: List of messages that didn't match any pattern
        show_all_messages: If True, log all actual messages for debugging
    """
    # Lazy %-style logging args: the strings are only formatted when the
    # WARNING level is actually enabled (stdlib logging best practice).
    if missing_patterns:
        logger.warning("Missing expected log patterns (%d):", len(missing_patterns))
        for pattern in sorted(missing_patterns):
            logger.warning(" - MISSING: '%s'", pattern)

    if unmatched_messages:
        logger.warning("New/unexpected log messages (%d):", len(unmatched_messages))
        for message in unmatched_messages:
            message_bytes = len(message.encode("utf-8"))
            logger.warning(" + NEW: '%s' (%d bytes)", message, message_bytes)

    # Log summary
    logger.warning("Log analysis summary:")
    logger.warning(" - Expected patterns: %d", len(expected_patterns))
    logger.warning(" - Matched patterns: %d", len(matched_patterns))
    logger.warning(" - Missing patterns: %d", len(missing_patterns))
    logger.warning(" - Actual messages: %d", len(actual_messages))
    logger.warning(" - Unmatched messages: %d", len(unmatched_messages))

    # Show all messages if requested (useful when patterns match but bytes don't)
    if show_all_messages:
        logger.warning("All actual log messages:")
        for i, message in enumerate(actual_messages):
            message_bytes = len(message.encode("utf-8"))
            logger.warning(" [%2d] '%s' (%d bytes)", i, message, message_bytes)
1470+
1471+
1472+
def _assert_log_bytes_within_tolerance(actual_bytes, expected_bytes, tolerance):
    """Assert that log bytes are within acceptable tolerance."""
    failure_detail = (
        f"Log bytes {actual_bytes} is not approximately equal to expected {expected_bytes} "
        f"within {tolerance*100}% tolerance. "
        f"This may indicate unwanted logs being produced or changes in logging behavior."
    )
    assert actual_bytes == pytest.approx(expected_bytes, rel=tolerance), failure_detail
1479+
1480+
1481+
@pytest.mark.skipolddriver
def test_logs_size_during_basic_query_stays_unchanged(conn_cnx, caplog):
    """Test that the amount of bytes logged during normal select 1 flow is within acceptable range. Related to: SNOW-2268606"""
    caplog.set_level(logging.INFO, "snowflake.connector")
    caplog.clear()

    # Test-specific constants
    EXPECTED_BYTES = 145
    ACCEPTABLE_DELTA = 0.6
    EXPECTED_PATTERNS = [
        "Snowflake Connector for Python Version: ",  # followed by version info
        "Connecting to GLOBAL Snowflake domain",
    ]

    with conn_cnx() as conn:
        with conn.cursor() as cur:
            cur.execute("select 1").fetchall()

    actual_messages = [record.getMessage() for record in caplog.records]
    total_log_bytes = _calculate_log_bytes(actual_messages)

    if total_log_bytes != EXPECTED_BYTES:
        # Fixed diagnostic: it previously told maintainers to update an
        # "EXACT_EXPECTED_LOGS_BYTES" constant, which does not exist here —
        # the constant is named EXPECTED_BYTES.
        logger.warning(
            f"There was a change in a size of the logs produced by the basic Snowflake query. "
            f"Expected: {EXPECTED_BYTES}, got: {total_log_bytes}. "
            f"We may need to update the test_logs_size_during_basic_query_stays_unchanged - i.e. EXPECTED_BYTES constant."
        )

        # Check if patterns match to decide whether to show all messages
        matched_patterns, missing_patterns, unmatched_messages = (
            _find_matching_patterns(actual_messages, EXPECTED_PATTERNS)
        )
        patterns_match_perfectly = (
            len(missing_patterns) == 0 and len(unmatched_messages) == 0
        )

        _log_pattern_analysis(
            actual_messages,
            EXPECTED_PATTERNS,
            matched_patterns,
            missing_patterns,
            unmatched_messages,
            show_all_messages=patterns_match_perfectly,
        )

    _assert_log_bytes_within_tolerance(
        total_log_bytes, EXPECTED_BYTES, ACCEPTABLE_DELTA
    )
1529+
1530+
1531+
@pytest.mark.skipolddriver
def test_no_new_warnings_or_errors_on_successful_basic_select(conn_cnx, caplog):
    """Test that the number of warning/error log entries stays the same during successful basic select operations. Related to: SNOW-2268606"""
    caplog.set_level(logging.WARNING, "snowflake.connector")
    baseline_warning_count = 0
    baseline_error_count = 0

    # Execute basic select operations and check counts remain the same
    caplog.clear()
    with conn_cnx() as conn:
        with conn.cursor() as cur:
            # Execute basic select operations
            result1 = cur.execute("select 1").fetchall()
            assert result1 == [(1,)]

    # Count warning/error log entries after operations
    test_warning_count = len(
        [r for r in caplog.records if r.levelno >= logging.WARNING]
    )
    test_error_count = len([r for r in caplog.records if r.levelno >= logging.ERROR])

    # Assert counts stay the same (no new warnings or errors)
    assert test_warning_count == baseline_warning_count, (
        f"Warning count increased from {baseline_warning_count} to {test_warning_count}. "
        # List every record that was counted (>= WARNING). Previously only
        # records at exactly WARNING level were listed, so ERROR records that
        # tripped this assertion were missing from the diagnostic output.
        f"New warnings: {[r.getMessage() for r in caplog.records if r.levelno >= logging.WARNING]}"
    )
    assert test_error_count == baseline_error_count, (
        f"Error count increased from {baseline_error_count} to {test_error_count}. "
        f"New errors: {[r.getMessage() for r in caplog.records if r.levelno >= logging.ERROR]}"
    )
1561+
1562+
13911563
@pytest.mark.skipolddriver
13921564
def test_ocsp_mode_insecure_mode_and_disable_ocsp_checks_mismatch_ocsp_enabled(
13931565
conn_cnx, is_public_test, is_local_dev_setup, caplog

0 commit comments

Comments
 (0)