Skip to content

Commit 434aa7a

Browse files
gaogaotiantian authored and HyukjinKwon committed
[SPARK-55612][PYTHON][TESTS] Add test_dataframe_query_context to modules
### What changes were proposed in this pull request?

Add test_dataframe_query_context and test_parity_dataframe_query_context to modules.py and fix the failure.

### Why are the changes needed?

The tests were not added, so CI won't run them. They also currently fail because of #50604.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Locally passed.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #54384 from gaogaotiantian/add-dataframe-query-context.

Authored-by: Tian Gao <gaogaotiantian@hotmail.com>
Signed-off-by: Hyukjin Kwon <gurwls223@apache.org>
1 parent a96afde commit 434aa7a

File tree

2 files changed

+24
-0
lines changed

2 files changed

+24
-0
lines changed

dev/sparktestsupport/modules.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -558,6 +558,7 @@ def __hash__(self):
558558
"pyspark.sql.tests.test_collection",
559559
"pyspark.sql.tests.test_creation",
560560
"pyspark.sql.tests.test_conversion",
561+
"pyspark.sql.tests.test_dataframe_query_context",
561562
"pyspark.sql.tests.test_listener",
562563
"pyspark.sql.tests.test_observation",
563564
"pyspark.sql.tests.test_repartition",
@@ -1165,6 +1166,7 @@ def __hash__(self):
11651166
"pyspark.sql.tests.connect.test_parity_sql",
11661167
"pyspark.sql.tests.connect.test_parity_job_cancellation",
11671168
"pyspark.sql.tests.connect.test_parity_dataframe",
1169+
"pyspark.sql.tests.connect.test_parity_dataframe_query_context",
11681170
"pyspark.sql.tests.connect.test_parity_collection",
11691171
"pyspark.sql.tests.connect.test_parity_creation",
11701172
"pyspark.sql.tests.connect.test_parity_observation",

python/pyspark/sql/tests/test_dataframe_query_context.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,7 @@ def test_dataframe_query_context(self):
5151
exception=pe.exception,
5252
errorClass="CAST_INVALID_INPUT",
5353
messageParameters={
54+
"ansiConfig": '"spark.sql.ansi.enabled"',
5455
"expression": "'string'",
5556
"sourceType": '"STRING"',
5657
"targetType": '"BIGINT"',
@@ -66,6 +67,7 @@ def test_dataframe_query_context(self):
6667
exception=pe.exception,
6768
errorClass="CAST_INVALID_INPUT",
6869
messageParameters={
70+
"ansiConfig": '"spark.sql.ansi.enabled"',
6971
"expression": "'string'",
7072
"sourceType": '"STRING"',
7173
"targetType": '"BIGINT"',
@@ -81,6 +83,7 @@ def test_dataframe_query_context(self):
8183
exception=pe.exception,
8284
errorClass="CAST_INVALID_INPUT",
8385
messageParameters={
86+
"ansiConfig": '"spark.sql.ansi.enabled"',
8487
"expression": "'string'",
8588
"sourceType": '"STRING"',
8689
"targetType": '"BIGINT"',
@@ -96,6 +99,7 @@ def test_dataframe_query_context(self):
9699
exception=pe.exception,
97100
errorClass="CAST_INVALID_INPUT",
98101
messageParameters={
102+
"ansiConfig": '"spark.sql.ansi.enabled"',
99103
"expression": "'string'",
100104
"sourceType": '"STRING"',
101105
"targetType": '"BIGINT"',
@@ -111,6 +115,7 @@ def test_dataframe_query_context(self):
111115
exception=pe.exception,
112116
errorClass="CAST_INVALID_INPUT",
113117
messageParameters={
118+
"ansiConfig": '"spark.sql.ansi.enabled"',
114119
"expression": "'string'",
115120
"sourceType": '"STRING"',
116121
"targetType": '"BIGINT"',
@@ -126,6 +131,7 @@ def test_dataframe_query_context(self):
126131
exception=pe.exception,
127132
errorClass="CAST_INVALID_INPUT",
128133
messageParameters={
134+
"ansiConfig": '"spark.sql.ansi.enabled"',
129135
"expression": "'string'",
130136
"sourceType": '"STRING"',
131137
"targetType": '"BIGINT"',
@@ -141,6 +147,7 @@ def test_dataframe_query_context(self):
141147
exception=pe.exception,
142148
errorClass="CAST_INVALID_INPUT",
143149
messageParameters={
150+
"ansiConfig": '"spark.sql.ansi.enabled"',
144151
"expression": "'string'",
145152
"sourceType": '"STRING"',
146153
"targetType": '"BIGINT"',
@@ -156,6 +163,7 @@ def test_dataframe_query_context(self):
156163
exception=pe.exception,
157164
errorClass="CAST_INVALID_INPUT",
158165
messageParameters={
166+
"ansiConfig": '"spark.sql.ansi.enabled"',
159167
"expression": "'string'",
160168
"sourceType": '"STRING"',
161169
"targetType": '"BIGINT"',
@@ -171,6 +179,7 @@ def test_dataframe_query_context(self):
171179
exception=pe.exception,
172180
errorClass="CAST_INVALID_INPUT",
173181
messageParameters={
182+
"ansiConfig": '"spark.sql.ansi.enabled"',
174183
"expression": "'string'",
175184
"sourceType": '"STRING"',
176185
"targetType": '"BIGINT"',
@@ -186,6 +195,7 @@ def test_dataframe_query_context(self):
186195
exception=pe.exception,
187196
errorClass="CAST_INVALID_INPUT",
188197
messageParameters={
198+
"ansiConfig": '"spark.sql.ansi.enabled"',
189199
"expression": "'string'",
190200
"sourceType": '"STRING"',
191201
"targetType": '"BIGINT"',
@@ -201,6 +211,7 @@ def test_dataframe_query_context(self):
201211
exception=pe.exception,
202212
errorClass="CAST_INVALID_INPUT",
203213
messageParameters={
214+
"ansiConfig": '"spark.sql.ansi.enabled"',
204215
"expression": "'string'",
205216
"sourceType": '"STRING"',
206217
"targetType": '"BIGINT"',
@@ -216,6 +227,7 @@ def test_dataframe_query_context(self):
216227
exception=pe.exception,
217228
errorClass="CAST_INVALID_INPUT",
218229
messageParameters={
230+
"ansiConfig": '"spark.sql.ansi.enabled"',
219231
"expression": "'string'",
220232
"sourceType": '"STRING"',
221233
"targetType": '"BIGINT"',
@@ -231,6 +243,7 @@ def test_dataframe_query_context(self):
231243
exception=pe.exception,
232244
errorClass="CAST_INVALID_INPUT",
233245
messageParameters={
246+
"ansiConfig": '"spark.sql.ansi.enabled"',
234247
"expression": "'string'",
235248
"sourceType": '"STRING"',
236249
"targetType": '"BIGINT"',
@@ -263,6 +276,7 @@ def test_dataframe_query_context(self):
263276
exception=pe.exception,
264277
errorClass="CAST_INVALID_INPUT",
265278
messageParameters={
279+
"ansiConfig": '"spark.sql.ansi.enabled"',
266280
"expression": "'string'",
267281
"sourceType": '"STRING"',
268282
"targetType": '"BIGINT"',
@@ -282,6 +296,7 @@ def test_dataframe_query_context(self):
282296
exception=pe.exception,
283297
errorClass="CAST_INVALID_INPUT",
284298
messageParameters={
299+
"ansiConfig": '"spark.sql.ansi.enabled"',
285300
"expression": "'string'",
286301
"sourceType": '"STRING"',
287302
"targetType": '"BIGINT"',
@@ -299,6 +314,7 @@ def test_dataframe_query_context(self):
299314
exception=pe.exception,
300315
errorClass="CAST_INVALID_INPUT",
301316
messageParameters={
317+
"ansiConfig": '"spark.sql.ansi.enabled"',
302318
"expression": "'string'",
303319
"sourceType": '"STRING"',
304320
"targetType": '"BIGINT"',
@@ -325,6 +341,7 @@ def test_dataframe_query_context(self):
325341
exception=pe.exception,
326342
errorClass="CAST_INVALID_INPUT",
327343
messageParameters={
344+
"ansiConfig": '"spark.sql.ansi.enabled"',
328345
"expression": "'string'",
329346
"sourceType": '"STRING"',
330347
"targetType": '"BIGINT"',
@@ -340,6 +357,7 @@ def test_dataframe_query_context(self):
340357
exception=pe.exception,
341358
errorClass="CAST_INVALID_INPUT",
342359
messageParameters={
360+
"ansiConfig": '"spark.sql.ansi.enabled"',
343361
"expression": "'string'",
344362
"sourceType": '"STRING"',
345363
"targetType": '"BIGINT"',
@@ -355,6 +373,7 @@ def test_dataframe_query_context(self):
355373
exception=pe.exception,
356374
errorClass="CAST_INVALID_INPUT",
357375
messageParameters={
376+
"ansiConfig": '"spark.sql.ansi.enabled"',
358377
"expression": "'string'",
359378
"sourceType": '"STRING"',
360379
"targetType": '"BIGINT"',
@@ -385,6 +404,7 @@ def test_dataframe_query_context(self):
385404
exception=pe.exception,
386405
errorClass="CAST_INVALID_INPUT",
387406
messageParameters={
407+
"ansiConfig": '"spark.sql.ansi.enabled"',
388408
"expression": "'string'",
389409
"sourceType": '"STRING"',
390410
"targetType": '"BIGINT"',
@@ -402,6 +422,7 @@ def test_dataframe_query_context(self):
402422
exception=pe.exception,
403423
errorClass="CAST_INVALID_INPUT",
404424
messageParameters={
425+
"ansiConfig": '"spark.sql.ansi.enabled"',
405426
"expression": "'string'",
406427
"sourceType": '"STRING"',
407428
"targetType": '"BIGINT"',
@@ -419,6 +440,7 @@ def test_dataframe_query_context(self):
419440
exception=pe.exception,
420441
errorClass="CAST_INVALID_INPUT",
421442
messageParameters={
443+
"ansiConfig": '"spark.sql.ansi.enabled"',
422444
"expression": "'string'",
423445
"sourceType": '"STRING"',
424446
"targetType": '"BIGINT"',

0 commit comments

Comments (0)