@@ -51,6 +51,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -66,6 +67,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -81,6 +83,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -96,6 +99,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -111,6 +115,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -126,6 +131,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -141,6 +147,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -156,6 +163,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -171,6 +179,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -186,6 +195,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -201,6 +211,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -216,6 +227,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -231,6 +243,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -263,6 +276,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -282,6 +296,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -299,6 +314,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -325,6 +341,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -340,6 +357,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -355,6 +373,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -385,6 +404,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -402,6 +422,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
@@ -419,6 +440,7 @@ def test_dataframe_query_context(self):
                 exception=pe.exception,
                 errorClass="CAST_INVALID_INPUT",
                 messageParameters={
+                    "ansiConfig": '"spark.sql.ansi.enabled"',
                     "expression": "'string'",
                     "sourceType": '"STRING"',
                     "targetType": '"BIGINT"',
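For context (outside the patch itself): the assertions above describe what happens when the string literal 'string' is cast to BIGINT with ANSI mode enabled, which now reports the ANSI config key in its message parameters. Below is a minimal, illustrative sketch of that behavior using the public PySpark error accessors; the session setup and column name are assumptions, and it stands in for the internal check_error test helper used in the patched test.

    # Illustrative sketch, not part of the patch: trigger CAST_INVALID_INPUT
    # under ANSI mode and inspect the message parameters that the updated
    # expectations (now including "ansiConfig") are compared against.
    from pyspark.sql import SparkSession
    from pyspark.errors import PySparkException

    spark = SparkSession.builder.getOrCreate()
    spark.conf.set("spark.sql.ansi.enabled", True)

    # A single-row DataFrame whose column holds the literal 'string'.
    df = spark.range(1).selectExpr("'string' AS s")
    try:
        # Under ANSI mode, casting 'string' to BIGINT raises instead of returning NULL.
        df.select(df.s.cast("bigint")).collect()
    except PySparkException as e:
        print(e.getErrorClass())         # CAST_INVALID_INPUT
        print(e.getMessageParameters())  # expected to include "ansiConfig", "expression",
                                         # "sourceType", and "targetType"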