 from unittest import mock
 
 import pytest
-from databricks.sdk.errors import BadRequest, NotFound, PermissionDenied, Unknown
+from databricks.sdk.errors import (
+    BadRequest,
+    DataLoss,
+    NotFound,
+    PermissionDenied,
+    Unknown,
+)
 from databricks.sdk.service import sql
 
 from databricks.labs.ucx.framework.crawlers import (
@@ -260,6 +266,14 @@ def test_raise_spark_sql_exceptions(mocker):
     with pytest.raises(NotFound):
         rb._raise_spark_sql_exceptions(error_message_invalid_table)
 
+    error_message_invalid_table = "DELTA_TABLE_NOT_FOUND foo table does not exist"
+    with pytest.raises(NotFound):
+        rb._raise_spark_sql_exceptions(error_message_invalid_table)
+
+    error_message_invalid_table = "DELTA_MISSING_TRANSACTION_LOG foo table does not exist"
+    with pytest.raises(DataLoss):
+        rb._raise_spark_sql_exceptions(error_message_invalid_table)
+
     error_message_invalid_syntax = "PARSE_SYNTAX_ERROR foo"
     with pytest.raises(BadRequest):
         rb._raise_spark_sql_exceptions(error_message_invalid_syntax)
@@ -293,6 +307,25 @@ def test_execute(mocker):
     with pytest.raises(NotFound):
         rb.execute(sql_query)
 
+    pyspark_sql_session.SparkSession.builder.getOrCreate.return_value.sql.side_effect = Exception(
+        "DELTA_TABLE_NOT_FOUND"
+    )
+    with pytest.raises(NotFound):
+        rb.execute(sql_query)
+
+    pyspark_sql_session.SparkSession.builder.getOrCreate.return_value.sql.side_effect = Exception(
+        "Unexpected exception"
+    )
+
+    with pytest.raises(BaseException):  # noqa: B017
+        rb.execute(sql_query)
+
+    pyspark_sql_session.SparkSession.builder.getOrCreate.return_value.sql.side_effect = Exception(
+        "DELTA_MISSING_TRANSACTION_LOG"
+    )
+    with pytest.raises(DataLoss):
+        rb.execute(sql_query)
+
     pyspark_sql_session.SparkSession.builder.getOrCreate.return_value.sql.side_effect = Exception(
         "PARSE_SYNTAX_ERROR"
     )
@@ -332,6 +365,18 @@ def test_fetch(mocker):
     with pytest.raises(NotFound):
         rb.fetch(sql_query)
 
+    pyspark_sql_session.SparkSession.builder.getOrCreate.return_value.sql.side_effect = Exception(
+        "DELTA_TABLE_NOT_FOUND"
+    )
+    with pytest.raises(NotFound):
+        rb.fetch(sql_query)
+
+    pyspark_sql_session.SparkSession.builder.getOrCreate.return_value.sql.side_effect = Exception(
+        "DELTA_MISSING_TRANSACTION_LOG"
+    )
+    with pytest.raises(DataLoss):
+        rb.fetch(sql_query)
+
     pyspark_sql_session.SparkSession.builder.getOrCreate.return_value.sql.side_effect = Exception(
         "PARSE_SYNTAX_ERROR"
     )
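For context, the new cases rely on `_raise_spark_sql_exceptions` translating well-known error codes found in the Spark exception message into Databricks SDK exceptions. A minimal sketch of that kind of message-based mapping, covering only the codes exercised in this diff (the actual helper in `databricks.labs.ucx.framework.crawlers` may differ), could look like this; the standalone function name here is hypothetical, not the real method:

    from databricks.sdk.errors import BadRequest, DataLoss, NotFound, Unknown


    def raise_spark_sql_exception_sketch(error_message: str) -> None:
        # Hypothetical sketch: inspect the error text for known Spark/Delta
        # error codes and raise the matching SDK exception, as the tests expect.
        if "DELTA_TABLE_NOT_FOUND" in error_message:
            raise NotFound(error_message)  # missing Delta table
        if "DELTA_MISSING_TRANSACTION_LOG" in error_message:
            raise DataLoss(error_message)  # missing/corrupted transaction log
        if "PARSE_SYNTAX_ERROR" in error_message:
            raise BadRequest(error_message)  # invalid SQL
        # Anything unrecognized still surfaces as an exception; the
        # "Unexpected exception" case above only asserts BaseException.
        raise Unknown(error_message)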