Commit 6ec8c0d

Test job with SparkPythonTask
1 parent e17a990

1 file changed (+10 lines, -2 lines)

tests/integration/fixtures/test_compute.py

Lines changed: 10 additions & 2 deletions
@@ -1,9 +1,9 @@
 import logging
 from datetime import datetime, timedelta, timezone
 
-from databricks.sdk.service.iam import PermissionLevel
-from databricks.sdk.service.jobs import RunResultState
 from databricks.sdk import WorkspaceClient
+from databricks.sdk.service.iam import PermissionLevel
+from databricks.sdk.service.jobs import RunResultState, SparkPythonTask
 
 from databricks.labs.pytester.fixtures.watchdog import TEST_RESOURCE_PURGE_TIMEOUT
 
@@ -30,6 +30,14 @@ def test_job(ws: WorkspaceClient, make_job) -> None:
     assert run_state is not None and run_state.result_state == RunResultState.SUCCESS
 
 
+def test_job_with_spark_python_task(ws: WorkspaceClient, make_job) -> None:
+    job = make_job(task_type=SparkPythonTask)
+    run = ws.jobs.run_now(job.job_id)
+    ws.jobs.wait_get_run_job_terminated_or_skipped(run_id=run.run_id)
+    run_state = ws.jobs.get_run(run_id=run.run_id).state
+    assert run_state is not None and run_state.result_state == RunResultState.SUCCESS
+
+
 def test_pipeline(make_pipeline, make_pipeline_permissions, make_group):
     group = make_group()
     pipeline = make_pipeline()
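
For readers unfamiliar with the fixture, make_job(task_type=SparkPythonTask) presumably provisions a throwaway job whose single task runs a Python file on a job cluster; the pytester fixture hides those provisioning details. As a rough, hypothetical sketch of the equivalent steps done directly against the Databricks SDK (the job name, python_file path, and cluster sizing below are placeholders, not values from this commit), it might look like:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute, jobs

    ws = WorkspaceClient()  # picks up credentials from the environment or ~/.databrickscfg

    # Hypothetical job: a single spark_python_task on a small ephemeral job cluster.
    job = ws.jobs.create(
        name="example-spark-python-task",  # placeholder name
        tasks=[
            jobs.Task(
                task_key="main",
                spark_python_task=jobs.SparkPythonTask(
                    # placeholder path to a Python file stored in the workspace
                    python_file="/Workspace/Users/someone@example.com/hello.py",
                    source=jobs.Source.WORKSPACE,
                ),
                new_cluster=compute.ClusterSpec(
                    spark_version=ws.clusters.select_spark_version(latest=True),
                    node_type_id=ws.clusters.select_node_type(local_disk=True),
                    num_workers=1,
                ),
            )
        ],
    )

    # run_now() returns a waiter; .result() blocks until the run reaches a terminal state.
    run = ws.jobs.run_now(job.job_id).result()
    assert run.state is not None and run.state.result_state == jobs.RunResultState.SUCCESS

The new test reaches the same terminal-state check by calling run_now and then wait_get_run_job_terminated_or_skipped explicitly instead of relying on the waiter's .result().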
