1 file changed, +41 −0 lines changed

# name: test/sql/storage/attach_large_insert.test
# description: Ensure large string inserts (single chunk > 10 MiB) succeed by batching AppendRows requests
# group: [storage]

require bigquery

require-env BQ_TEST_PROJECT

require-env BQ_TEST_DATASET

statement ok
ATTACH 'project=${BQ_TEST_PROJECT} dataset=${BQ_TEST_DATASET}' AS bq (TYPE bigquery);

statement ok
CREATE OR REPLACE TABLE bq.${BQ_TEST_DATASET}.large_json_insert (
    id INTEGER,
    payload_a STRING,
    payload_b STRING,
    payload_c STRING,
    payload_d STRING,
    payload_e STRING
);

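# Each row carries five ~100 KB JSON payloads (~500 KB per row), so 100 rows in a single
# chunk is roughly 50 MB of string data, well above the 10 MiB threshold named above.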
statement ok
INSERT INTO bq.${BQ_TEST_DATASET}.large_json_insert
SELECT
    i,
    '{"payload_a":"' || repeat('A', 100000) || '"}',
    '{"payload_b":"' || repeat('B', 100000) || '"}',
    '{"payload_c":"' || repeat('C', 100000) || '"}',
    '{"payload_d":"' || repeat('D', 100000) || '"}',
    '{"payload_e":"' || repeat('E', 100000) || '"}'
FROM range(100) tbl(i);

query I
SELECT COUNT(*) FROM bq.${BQ_TEST_DATASET}.large_json_insert;
----
100

statement ok
DROP TABLE bq.${BQ_TEST_DATASET}.large_json_insert;
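
For context on the behavior under test: instead of sending one AppendRows request larger than the API's per-request limit, an oversized chunk is split into several smaller requests. The sketch below illustrates size-based batching in C++ under stated assumptions; the function name `BatchRowsBySize`, the use of pre-serialized row strings, and the exact byte limit are illustrative only, not the extension's actual implementation.

```cpp
#include <cstddef>
#include <string>
#include <vector>

// Hypothetical sketch: split serialized rows into batches whose combined size
// stays under a per-request byte limit (e.g. ~10 MiB), so one large chunk is
// sent as several smaller requests rather than a single oversized one.
std::vector<std::vector<std::string>>
BatchRowsBySize(const std::vector<std::string> &rows, size_t max_batch_bytes) {
    std::vector<std::vector<std::string>> batches;
    std::vector<std::string> current;
    size_t current_bytes = 0;
    for (const auto &row : rows) {
        // Flush the current batch before a row that would push it past the limit,
        // so every non-empty batch stays under max_batch_bytes.
        if (!current.empty() && current_bytes + row.size() > max_batch_bytes) {
            batches.push_back(std::move(current));
            current.clear();
            current_bytes = 0;
        }
        current.push_back(row);
        current_bytes += row.size();
    }
    if (!current.empty()) {
        batches.push_back(std::move(current));
    }
    return batches;
}
```

A batch is flushed before the row that would exceed the cap, so each request stays under the limit as long as no single row is itself larger than the limit; an individual oversized row would still require row-level handling (splitting or a hard error).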