Skip to content

Commit 56bf4dd

Browse files
SNOW-2268606-regression-3.17.0-unexplained-errors-in-connecting-to-IMDS-from-3.17.0-leads-to-connection-failures-and-excessive-logging-works-with-3.16.0 (#2489)
(cherry picked from commit 631eec6)
1 parent aa9526b commit 56bf4dd

File tree

5 files changed

+309
-39
lines changed

5 files changed

+309
-39
lines changed

src/snowflake/connector/auth/_auth.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -138,7 +138,7 @@ def base_auth_data(
138138
"SOCKET_TIMEOUT": socket_timeout,
139139
"PLATFORM": detect_platforms(
140140
platform_detection_timeout_seconds=platform_detection_timeout_seconds,
141-
session_manager=session_manager,
141+
session_manager=session_manager.clone(max_retries=0),
142142
),
143143
},
144144
},

src/snowflake/connector/platform_detection.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -405,7 +405,7 @@ def detect_platforms(
405405
logger.debug(
406406
"No session manager provided. HTTP settings may not be preserved. Using default."
407407
)
408-
session_manager = SessionManager(use_pooling=False)
408+
session_manager = SessionManager(use_pooling=False, max_retries=0)
409409

410410
# Run environment-only checks synchronously (no network calls, no threading overhead)
411411
platforms = {

src/snowflake/connector/session_manager.py

Lines changed: 43 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
from .vendored.requests.adapters import BaseAdapter, HTTPAdapter
1717
from .vendored.requests.exceptions import InvalidProxyURL
1818
from .vendored.requests.utils import prepend_scheme_if_needed, select_proxy
19-
from .vendored.urllib3 import PoolManager
19+
from .vendored.urllib3 import PoolManager, Retry
2020
from .vendored.urllib3.poolmanager import ProxyManager
2121
from .vendored.urllib3.util.url import parse_url
2222

@@ -119,7 +119,7 @@ class BaseHttpConfig:
119119
"""Immutable HTTP configuration shared by SessionManager instances."""
120120

121121
use_pooling: bool = True
122-
max_retries: int | None = REQUESTS_RETRY
122+
max_retries: int | Retry | None = REQUESTS_RETRY
123123
proxy_host: str | None = None
124124
proxy_port: str | None = None
125125
proxy_user: str | None = None
@@ -217,6 +217,40 @@ def close(self) -> None:
217217
self._idle_sessions.clear()
218218

219219

220+
class _ConfigDirectAccessMixin(abc.ABC):
    """Expose selected ``HttpConfig`` fields as attributes on the host class.

    Subclasses must provide the ``config`` property (getter + setter).  The
    config object is immutable, so every setter here swaps it for a modified
    copy produced by ``copy_with`` rather than mutating it in place.
    """

    @property
    @abc.abstractmethod
    def config(self) -> HttpConfig: ...

    # Fixed annotations: a property setter returns None, and *value* is the
    # replacement HttpConfig (the original declared "-> HttpConfig" and left
    # *value* untyped).
    @config.setter
    @abc.abstractmethod
    def config(self, value: HttpConfig) -> None: ...

    @property
    def use_pooling(self) -> bool:
        # Whether sessions are pooled/reused per host.
        return self.config.use_pooling

    @use_pooling.setter
    def use_pooling(self, value: bool) -> None:
        self.config = self.config.copy_with(use_pooling=value)

    @property
    def adapter_factory(self) -> Callable[..., HTTPAdapter]:
        # Factory used to build the HTTPAdapter mounted on new sessions.
        return self.config.adapter_factory

    @adapter_factory.setter
    def adapter_factory(self, value: Callable[..., HTTPAdapter]) -> None:
        self.config = self.config.copy_with(adapter_factory=value)

    @property
    def max_retries(self) -> Retry | int:
        # Retry policy passed to the adapter; either a bare count or a
        # urllib3 Retry object.
        return self.config.max_retries

    @max_retries.setter
    def max_retries(self, value: Retry | int) -> None:
        self.config = self.config.copy_with(max_retries=value)
252+
253+
220254
class _RequestVerbsUsingSessionMixin(abc.ABC):
221255
"""
222256
Mixin that provides HTTP methods (get, post, put, etc.) mirroring requests.Session, maintaining their default argument behavior (e.g., HEAD uses allow_redirects=False).
@@ -327,7 +361,7 @@ def delete(
327361
return session.delete(url, headers=headers, timeout=timeout, **kwargs)
328362

329363

330-
class SessionManager(_RequestVerbsUsingSessionMixin):
364+
class SessionManager(_RequestVerbsUsingSessionMixin, _ConfigDirectAccessMixin):
331365
"""
332366
Central HTTP session manager that handles all external requests from the Snowflake driver.
333367
@@ -394,22 +428,6 @@ def proxy_url(self) -> str:
394428
self._cfg.proxy_password,
395429
)
396430

397-
@property
398-
def use_pooling(self) -> bool:
399-
return self._cfg.use_pooling
400-
401-
@use_pooling.setter
402-
def use_pooling(self, value: bool) -> None:
403-
self._cfg = self._cfg.copy_with(use_pooling=value)
404-
405-
@property
406-
def adapter_factory(self) -> Callable[..., HTTPAdapter]:
407-
return self._cfg.adapter_factory
408-
409-
@adapter_factory.setter
410-
def adapter_factory(self, value: Callable[..., HTTPAdapter]) -> None:
411-
self._cfg = self._cfg.copy_with(adapter_factory=value)
412-
413431
@property
414432
def sessions_map(self) -> dict[str, SessionPool]:
415433
return self._sessions_map
@@ -435,9 +453,7 @@ def get_session_pool_manager(session: Session, url: str) -> PoolManager | None:
435453
def _mount_adapters(self, session: requests.Session) -> None:
436454
try:
437455
# Its important that each separate session manager creates its own adapters - because they are storing internally PoolManagers - which shouldn't be reused if not in scope of the same adapter.
438-
adapter = self._cfg.adapter_factory(
439-
max_retries=self._cfg.max_retries or REQUESTS_RETRY
440-
)
456+
adapter = self._cfg.get_adapter()
441457
if adapter is not None:
442458
session.mount("http://", adapter)
443459
session.mount("https://", adapter)
@@ -505,27 +521,18 @@ def close(self):
505521

506522
def clone(
507523
self,
508-
*,
509-
use_pooling: bool | None = None,
510-
adapter_factory: AdapterFactory | None = None,
524+
**http_config_overrides,
511525
) -> SessionManager:
512526
"""Return a new *stateless* SessionManager sharing this instance’s config.
513527
514-
"Shallow" means the configuration object (HttpConfig) is reused as-is,
528+
"Shallow clone" - the configuration object (HttpConfig) is reused as-is,
515529
while *stateful* aspects such as the per-host SessionPool mapping are
516530
reset, so the two managers do not share live `requests.Session`
517531
objects.
518-
Optional *use_pooling* / *adapter_factory* overrides create a modified
519-
copy of the config before instantiation.
532+
Optional kwargs (e.g. *use_pooling* / *adapter_factory* / max_retries etc.) - overrides to create a modified
533+
copy of the HttpConfig before instantiation.
520534
"""
521-
522-
overrides: dict[str, Any] = {}
523-
if use_pooling is not None:
524-
overrides["use_pooling"] = use_pooling
525-
if adapter_factory is not None:
526-
overrides["adapter_factory"] = adapter_factory
527-
528-
return SessionManager.from_config(self._cfg, **overrides)
535+
return SessionManager.from_config(self._cfg, **http_config_overrides)
529536

530537
def __getstate__(self):
531538
state = self.__dict__.copy()

test/integ/test_connection.py

Lines changed: 172 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,8 @@
6060
except ImportError:
6161
pass
6262

63+
logger = logging.getLogger(__name__)
64+
6365

6466
def test_basic(conn_testaccount):
6567
"""Basic Connection test."""
@@ -1398,6 +1400,176 @@ def test_ocsp_mode_insecure_mode_and_disable_ocsp_checks_mismatch_ocsp_disabled(
13981400
assert "This connection does not perform OCSP checks." in caplog.text
13991401

14001402

1403+
def _message_matches_pattern(message, pattern):
1404+
"""Check if a log message matches a pattern (exact match or starts with pattern)."""
1405+
return message == pattern or message.startswith(pattern)
1406+
1407+
1408+
def _find_matching_patterns(messages, patterns):
    """Find which patterns match the given messages.

    Returns:
        tuple: (matched_patterns, missing_patterns, unmatched_messages)
    """
    matched_patterns = set()
    unmatched_messages = []

    for message in messages:
        # First pattern (in order) that the message equals or starts with;
        # the helper logic is inlined here.
        first_hit = next(
            (p for p in patterns if message == p or message.startswith(p)),
            None,
        )
        if first_hit is None:
            unmatched_messages.append(message)
        else:
            matched_patterns.add(first_hit)

    missing_patterns = set(patterns) - matched_patterns
    return matched_patterns, missing_patterns, unmatched_messages
1429+
1430+
1431+
def _calculate_log_bytes(messages):
1432+
"""Calculate total byte size of log messages."""
1433+
return sum(len(message.encode("utf-8")) for message in messages)
1434+
1435+
1436+
def _log_pattern_analysis(
1437+
actual_messages,
1438+
expected_patterns,
1439+
matched_patterns,
1440+
missing_patterns,
1441+
unmatched_messages,
1442+
show_all_messages=False,
1443+
):
1444+
"""Log detailed analysis of pattern differences.
1445+
1446+
Args:
1447+
actual_messages: List of actual log messages
1448+
expected_patterns: List of expected log patterns
1449+
matched_patterns: Set of patterns that were found
1450+
missing_patterns: Set of patterns that were not found
1451+
unmatched_messages: List of messages that didn't match any pattern
1452+
show_all_messages: If True, log all actual messages for debugging
1453+
"""
1454+
1455+
if missing_patterns:
1456+
logger.warning(f"Missing expected log patterns ({len(missing_patterns)}):")
1457+
for pattern in sorted(missing_patterns):
1458+
logger.warning(f" - MISSING: '{pattern}'")
1459+
1460+
if unmatched_messages:
1461+
logger.warning(f"New/unexpected log messages ({len(unmatched_messages)}):")
1462+
for message in unmatched_messages:
1463+
message_bytes = len(message.encode("utf-8"))
1464+
logger.warning(f" + NEW: '{message}' ({message_bytes} bytes)")
1465+
1466+
# Log summary
1467+
logger.warning("Log analysis summary:")
1468+
logger.warning(f" - Expected patterns: {len(expected_patterns)}")
1469+
logger.warning(f" - Matched patterns: {len(matched_patterns)}")
1470+
logger.warning(f" - Missing patterns: {len(missing_patterns)}")
1471+
logger.warning(f" - Actual messages: {len(actual_messages)}")
1472+
logger.warning(f" - Unmatched messages: {len(unmatched_messages)}")
1473+
1474+
# Show all messages if requested (useful when patterns match but bytes don't)
1475+
if show_all_messages:
1476+
logger.warning("All actual log messages:")
1477+
for i, message in enumerate(actual_messages):
1478+
message_bytes = len(message.encode("utf-8"))
1479+
logger.warning(f" [{i:2d}] '{message}' ({message_bytes} bytes)")
1480+
1481+
1482+
def _assert_log_bytes_within_tolerance(actual_bytes, expected_bytes, tolerance):
    """Assert that log bytes are within acceptable relative tolerance."""
    # Build the diagnostic message up front; the assert itself stays a single
    # pytest.approx comparison so assertion rewriting still applies.
    failure_details = (
        f"Log bytes {actual_bytes} is not approximately equal to expected {expected_bytes} "
        f"within {tolerance*100}% tolerance. "
        f"This may indicate unwanted logs being produced or changes in logging behavior."
    )
    assert actual_bytes == pytest.approx(expected_bytes, rel=tolerance), failure_details
1489+
1490+
1491+
@pytest.mark.skipolddriver
def test_logs_size_during_basic_query_stays_unchanged(conn_cnx, caplog):
    """Test that the amount of bytes logged during normal select 1 flow is within acceptable range. Related to: SNOW-2268606"""
    caplog.set_level(logging.INFO, "snowflake.connector")
    caplog.clear()

    # Test-specific constants
    EXPECTED_BYTES = 145
    ACCEPTABLE_DELTA = 0.6
    EXPECTED_PATTERNS = [
        "Snowflake Connector for Python Version: ",  # followed by version info
        "Connecting to GLOBAL Snowflake domain",
    ]

    with conn_cnx() as conn:
        with conn.cursor() as cur:
            cur.execute("select 1").fetchall()

    actual_messages = [record.getMessage() for record in caplog.records]
    total_log_bytes = _calculate_log_bytes(actual_messages)

    if total_log_bytes != EXPECTED_BYTES:
        # Advisory only: the hard check below allows ACCEPTABLE_DELTA slack.
        # Fixed the message to name the real constant (EXPECTED_BYTES); it
        # previously referenced a non-existent EXACT_EXPECTED_LOGS_BYTES.
        logger.warning(
            f"There was a change in a size of the logs produced by the basic Snowflake query. "
            f"Expected: {EXPECTED_BYTES}, got: {total_log_bytes}. "
            f"We may need to update the test_logs_size_during_basic_query_stays_unchanged - i.e. EXPECTED_BYTES constant."
        )

        # Check if patterns match to decide whether to show all messages
        matched_patterns, missing_patterns, unmatched_messages = (
            _find_matching_patterns(actual_messages, EXPECTED_PATTERNS)
        )
        patterns_match_perfectly = (
            len(missing_patterns) == 0 and len(unmatched_messages) == 0
        )

        _log_pattern_analysis(
            actual_messages,
            EXPECTED_PATTERNS,
            matched_patterns,
            missing_patterns,
            unmatched_messages,
            # When patterns all match, dump every message so a pure byte-count
            # drift can still be diagnosed from CI logs.
            show_all_messages=patterns_match_perfectly,
        )

    _assert_log_bytes_within_tolerance(
        total_log_bytes, EXPECTED_BYTES, ACCEPTABLE_DELTA
    )
1539+
1540+
1541+
@pytest.mark.skipolddriver
def test_no_new_warnings_or_errors_on_successful_basic_select(conn_cnx, caplog):
    """Test that the number of warning/error log entries stays the same during successful basic select operations. Related to: SNOW-2268606"""
    caplog.set_level(logging.WARNING, "snowflake.connector")
    baseline_warning_count = 0
    baseline_error_count = 0

    # Run a basic select and verify no WARNING/ERROR records are produced.
    caplog.clear()
    with conn_cnx() as conn:
        with conn.cursor() as cur:
            rows = cur.execute("select 1").fetchall()
            assert rows == [(1,)]

    # Count warning/error records (booleans sum as 0/1).
    test_warning_count = sum(
        r.levelno >= logging.WARNING for r in caplog.records
    )
    test_error_count = sum(r.levelno >= logging.ERROR for r in caplog.records)

    # Both counts must match the (zero) baseline.
    assert test_warning_count == baseline_warning_count, (
        f"Warning count increased from {baseline_warning_count} to {test_warning_count}. "
        f"New warnings: {[r.getMessage() for r in caplog.records if r.levelno == logging.WARNING]}"
    )
    assert test_error_count == baseline_error_count, (
        f"Error count increased from {baseline_error_count} to {test_error_count}. "
        f"New errors: {[r.getMessage() for r in caplog.records if r.levelno >= logging.ERROR]}"
    )
1571+
1572+
14011573
@pytest.mark.skipolddriver
14021574
def test_ocsp_mode_insecure_mode_and_disable_ocsp_checks_mismatch_ocsp_enabled(
14031575
conn_cnx, is_public_test, is_local_dev_setup, caplog

0 commit comments

Comments
 (0)