Skip to content

Commit f0d9e29

Browse files
Apply #2209 to async tests
1 parent 8fdaadf commit f0d9e29

File tree

1 file changed

+55
-4
lines changed

1 file changed

+55
-4
lines changed

test/integ/aio/test_cursor_async.py

Lines changed: 55 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
import pickle
1414
import time
1515
from datetime import date, datetime, timezone
16+
from typing import NamedTuple
1617
from unittest import mock
1718

1819
import pytest
@@ -56,6 +57,36 @@
5657
from snowflake.connector.util_text import random_string
5758

5859

60+
class LobBackendParams(NamedTuple):
    """Backend LOB (large object) parameters reported by the server.

    Currently carries only the effective MAX_LOB_SIZE_IN_MEMORY value,
    in bytes, as resolved by the ``lob_params`` fixture.
    """

    # Effective in-memory LOB size limit, in bytes.
    max_lob_size_in_memory: int
62+
63+
64+
@pytest.fixture()
async def lob_params(conn_cnx) -> LobBackendParams:
    """Resolve the backend's effective MAX_LOB_SIZE_IN_MEMORY.

    Queries the server for the FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY
    feature flag and, when it is ENABLED and the parameter row exists,
    returns the reported MAX_LOB_SIZE_IN_MEMORY; otherwise falls back to
    the legacy default of ``2**24`` bytes (16 MiB).
    """
    async with conn_cnx() as cnx:
        cursor = cnx.cursor()

        # Check whether the increased-LOB-size feature flag is on.
        await cursor.execute(
            "show parameters like 'FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY'"
        )
        feature_row = await cursor.fetchone()
        feature_enabled = feature_row and feature_row[1] == "ENABLED"

        # Read the configured maximum; trust it only when the feature is on
        # and the parameter row actually came back.
        await cursor.execute("show parameters like 'MAX_LOB_SIZE_IN_MEMORY'")
        size_row = await cursor.fetchone()
        if feature_enabled and size_row:
            max_lob_size_in_memory = int(size_row[1])
        else:
            max_lob_size_in_memory = 2**24

        return LobBackendParams(max_lob_size_in_memory)
88+
89+
5990
@pytest.fixture
6091
async def conn(conn_cnx, db_parameters):
6192
async with conn_cnx() as cnx:
@@ -1514,7 +1545,7 @@ async def test_resultbatch(
15141545
),
15151546
)
15161547
async def test_resultbatch_lazy_fetching_and_schemas(
1517-
conn_cnx, result_format, patch_path
1548+
conn_cnx, result_format, patch_path, lob_params
15181549
):
15191550
"""Tests whether pre-fetching results chunks fetches the right amount of them."""
15201551
rowcount = 1000000 # We need at least 5 chunks for this test
@@ -1543,7 +1574,17 @@ async def test_resultbatch_lazy_fetching_and_schemas(
15431574
# all batches should have the same schema
15441575
assert schema == [
15451576
ResultMetadata("C1", 0, None, None, 10, 0, False),
1546-
ResultMetadata("C2", 2, None, 16777216, None, None, False),
1577+
ResultMetadata(
1578+
"C2",
1579+
2,
1580+
None,
1581+
schema[
1582+
1
1583+
].internal_size, # TODO: lob_params.max_lob_size_in_memory,
1584+
None,
1585+
None,
1586+
False,
1587+
),
15471588
]
15481589
assert patched_download.call_count == 0
15491590
assert len(result_batches) > 5
@@ -1564,7 +1605,9 @@ async def test_resultbatch_lazy_fetching_and_schemas(
15641605

15651606

15661607
@pytest.mark.parametrize("result_format", ["json", "arrow"])
1567-
async def test_resultbatch_schema_exists_when_zero_rows(conn_cnx, result_format):
1608+
async def test_resultbatch_schema_exists_when_zero_rows(
1609+
conn_cnx, result_format, lob_params
1610+
):
15681611
async with conn_cnx(
15691612
session_parameters={"python_connector_query_result_format": result_format}
15701613
) as con:
@@ -1580,7 +1623,15 @@ async def test_resultbatch_schema_exists_when_zero_rows(conn_cnx, result_format)
15801623
schema = result_batches[0].schema
15811624
assert schema == [
15821625
ResultMetadata("C1", 0, None, None, 10, 0, False),
1583-
ResultMetadata("C2", 2, None, 16777216, None, None, False),
1626+
ResultMetadata(
1627+
"C2",
1628+
2,
1629+
None,
1630+
schema[1].internal_size, # TODO: lob_params.max_lob_size_in_memory,
1631+
None,
1632+
None,
1633+
False,
1634+
),
15841635
]
15851636

15861637

0 commit comments

Comments
 (0)