13
13
import pickle
14
14
import time
15
15
from datetime import date , datetime , timezone
16
+ from typing import NamedTuple
16
17
from unittest import mock
17
18
18
19
import pytest
56
57
from snowflake .connector .util_text import random_string
57
58
58
59
60
class LobBackendParams(NamedTuple):
    """Backend LOB (large object) size limits resolved from server parameters.

    Attributes:
        max_lob_size_in_memory: Effective ``MAX_LOB_SIZE_IN_MEMORY`` value in
            bytes (falls back to the 16 MiB default when the increased-LOB
            feature is not enabled).
    """

    # Effective in-memory LOB size cap, in bytes.
    max_lob_size_in_memory: int
62
+
63
+
64
@pytest.fixture()
async def lob_params(conn_cnx) -> LobBackendParams:
    """Resolve the backend's effective max LOB size via ``SHOW PARAMETERS``.

    Queries the connected account for the
    ``FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY`` flag and, when it is enabled,
    reads ``MAX_LOB_SIZE_IN_MEMORY``; otherwise the historical 16 MiB default
    (``2**24``) is reported.
    """
    async with conn_cnx() as cnx:
        cur = cnx.cursor()

        # Check whether the increased-LOB-size feature flag is enabled.
        await cur.execute(
            "show parameters like 'FEATURE_INCREASED_MAX_LOB_SIZE_IN_MEMORY'"
        )
        feature_row = await cur.fetchone()
        feature_enabled = bool(feature_row) and feature_row[1] == "ENABLED"

        # Read the configured maximum; fall back to the 2**24 (16 MiB)
        # default when the feature is off or the parameter row is absent.
        await cur.execute("show parameters like 'MAX_LOB_SIZE_IN_MEMORY'")
        size_row = await cur.fetchone()
        if feature_enabled and size_row:
            effective_size = int(size_row[1])
        else:
            effective_size = 2**24

        return LobBackendParams(effective_size)
88
+
89
+
59
90
@pytest .fixture
60
91
async def conn (conn_cnx , db_parameters ):
61
92
async with conn_cnx () as cnx :
@@ -1514,7 +1545,7 @@ async def test_resultbatch(
1514
1545
),
1515
1546
)
1516
1547
async def test_resultbatch_lazy_fetching_and_schemas (
1517
- conn_cnx , result_format , patch_path
1548
+ conn_cnx , result_format , patch_path , lob_params
1518
1549
):
1519
1550
"""Tests whether pre-fetching results chunks fetches the right amount of them."""
1520
1551
rowcount = 1000000 # We need at least 5 chunks for this test
@@ -1543,7 +1574,17 @@ async def test_resultbatch_lazy_fetching_and_schemas(
1543
1574
# all batches should have the same schema
1544
1575
assert schema == [
1545
1576
ResultMetadata ("C1" , 0 , None , None , 10 , 0 , False ),
1546
- ResultMetadata ("C2" , 2 , None , 16777216 , None , None , False ),
1577
+ ResultMetadata (
1578
+ "C2" ,
1579
+ 2 ,
1580
+ None ,
1581
+ schema [
1582
+ 1
1583
+ ].internal_size , # TODO: lob_params.max_lob_size_in_memory,
1584
+ None ,
1585
+ None ,
1586
+ False ,
1587
+ ),
1547
1588
]
1548
1589
assert patched_download .call_count == 0
1549
1590
assert len (result_batches ) > 5
@@ -1564,7 +1605,9 @@ async def test_resultbatch_lazy_fetching_and_schemas(
1564
1605
1565
1606
1566
1607
@pytest .mark .parametrize ("result_format" , ["json" , "arrow" ])
1567
- async def test_resultbatch_schema_exists_when_zero_rows (conn_cnx , result_format ):
1608
+ async def test_resultbatch_schema_exists_when_zero_rows (
1609
+ conn_cnx , result_format , lob_params
1610
+ ):
1568
1611
async with conn_cnx (
1569
1612
session_parameters = {"python_connector_query_result_format" : result_format }
1570
1613
) as con :
@@ -1580,7 +1623,15 @@ async def test_resultbatch_schema_exists_when_zero_rows(conn_cnx, result_format)
1580
1623
schema = result_batches [0 ].schema
1581
1624
assert schema == [
1582
1625
ResultMetadata ("C1" , 0 , None , None , 10 , 0 , False ),
1583
- ResultMetadata ("C2" , 2 , None , 16777216 , None , None , False ),
1626
+ ResultMetadata (
1627
+ "C2" ,
1628
+ 2 ,
1629
+ None ,
1630
+ schema [1 ].internal_size , # TODO: lob_params.max_lob_size_in_memory,
1631
+ None ,
1632
+ None ,
1633
+ False ,
1634
+ ),
1584
1635
]
1585
1636
1586
1637
0 commit comments