
Commit fac670d

Create workspace file for SparkPythonTask

1 parent 71fad6b

2 files changed: +15 -4 lines changed

src/databricks/labs/pytester/fixtures/compute.py

Lines changed: 10 additions & 2 deletions
@@ -162,7 +162,14 @@ def create(*, instance_pool_name=None, node_type_id=None, **kwargs) -> CreateIns
 
 
 @fixture
-def make_job(ws, make_random, make_notebook, log_workspace_link, watchdog_remove_after) -> Generator[Job, None, None]:
+def make_job(
+    ws,
+    make_random,
+    make_notebook,
+    make_workspace_file,
+    log_workspace_link,
+    watchdog_remove_after,
+) -> Generator[Job, None, None]:
     """
     Create a Databricks job and clean it up after the test. Returns a function to create jobs, that returns
     a `databricks.sdk.service.jobs.Job` instance.
@@ -212,7 +219,6 @@ def create(
             raise ValueError(
                 "The `tasks` parameter is exclusive with the `path`, `content` `spark_conf` and `libraries` parameters."
             )
-        path = path or make_notebook(content=content)
         name = name or f"dummy-j{make_random(4)}"
         tags = tags or {}
         tags["RemoveAfter"] = tags.get("RemoveAfter", watchdog_remove_after)
@@ -230,8 +236,10 @@ def create(
             timeout_seconds=0,
         )
         if task_type == SparkPythonTask:
+            path = path or make_workspace_file(content=content)
            task.spark_python_task = SparkPythonTask(python_file=str(path))
         else:
+            path = path or make_notebook(content=content)
             task.notebook_task = NotebookTask(notebook_path=str(path))
         tasks = [task]
         response = ws.jobs.create(name=name, tasks=tasks, tags=tags)
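
In short, `make_job` now provisions the task source per task type: for `SparkPythonTask` the `content` goes through `make_workspace_file` (a plain workspace file with a `.py` suffix), while notebooks keep going through `make_notebook`. A minimal sketch of a test exercising the new path, assuming the databricks-labs-pytester plugin is installed so `make_job` is injected as a pytest fixture (the test name is illustrative, not from this commit):

from databricks.sdk.service.jobs import SparkPythonTask

def test_job_runs_python_file(make_job):
    # `content` is routed to make_workspace_file for SparkPythonTask,
    # so the source lands in the workspace as a .py file rather than
    # being imported as a notebook.
    job = make_job(content="print(3)", task_type=SparkPythonTask)
    task = job.settings.tasks[0]
    assert task.spark_python_task is not None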

tests/unit/fixtures/test_compute.py

Lines changed: 5 additions & 2 deletions
@@ -62,16 +62,19 @@ def test_make_job_with_content() -> None:
     tasks = job.settings.tasks
     assert len(tasks) == 1
     workspace_path = WorkspacePath(ctx["ws"], tasks[0].notebook_task.notebook_path)
+    assert not workspace_path.suffix  # Notebooks have no suffix
     assert workspace_path.read_text() == "print(2)"
 
 
 def test_make_job_with_spark_python_task() -> None:
-    _, job = call_stateful(make_job, path="test.py", task_type=SparkPythonTask)
+    ctx, job = call_stateful(make_job, content="print(3)", task_type=SparkPythonTask)
     tasks = job.settings.tasks
     assert len(tasks) == 1
     assert tasks[0].notebook_task is None
     assert tasks[0].spark_python_task is not None
-    assert tasks[0].spark_python_task.python_file == "test.py"
+    workspace_path = WorkspacePath(ctx["ws"], tasks[0].spark_python_task.python_file)
+    assert workspace_path.suffix == ".py"  # Python files have suffix
+    assert workspace_path.read_text() == "print(3)"
 
 
 def test_make_job_with_spark_conf() -> None:
