Skip to content

Commit 2fb7336

Browse files
authored
Serverless e2e tests (#1565)
## Changes Two `ucws` specs that run on a UC+Serverless workspace: - Serverless workflow - Serverless dbconnect + python environment setup ## Tests The new e2e specs themselves.
1 parent 02c2fea commit 2fb7336

File tree

6 files changed

+386
-52
lines changed

6 files changed

+386
-52
lines changed

packages/databricks-vscode/scripts/list_integration_tests.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@ const integrationTests = glob
2121
return {
2222
path: toUnixPath(path.relative(process.cwd(), testPath)),
2323
baseName: path.basename(testPath, ".e2e.ts"),
24+
ucws: testPath.includes(".ucws.") ? true : false,
2425
};
2526
});
2627

packages/databricks-vscode/src/configuration/DatabricksWorkspace.ts

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,8 @@ type ServerlessEnablementResponse = {
1010
setting?: {
1111
value?: {
1212
// eslint-disable-next-line @typescript-eslint/naming-convention
13-
preview_enablement_val?: {
14-
enabled: boolean;
13+
serverless_jobs_notebooks_workspace_enable_val?: {
14+
value: string;
1515
};
1616
};
1717
};
@@ -125,12 +125,13 @@ export class DatabricksWorkspace {
125125
}
126126
try {
127127
const serverlessEnablement = (await client.apiClient.request(
128-
`/api/2.0/settings-api/workspace/${id}/serverless_job_nb`,
128+
`/api/2.0/settings-api/workspace/${id}/serverless_jobs_ws_nb_enable`,
129129
"GET"
130130
)) as ServerlessEnablementResponse;
131131
const enableServerless =
132-
serverlessEnablement?.setting?.value?.preview_enablement_val
133-
?.enabled === true;
132+
serverlessEnablement?.setting?.value
133+
?.serverless_jobs_notebooks_workspace_enable_val?.value ===
134+
"ENABLED";
134135
state = {...state, enableServerless};
135136
} catch (e) {
136137
ctx?.logger?.error("Can't detect serverless support", e);
Lines changed: 252 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,252 @@
1+
import path from "node:path";
2+
import * as fs from "fs/promises";
3+
import assert from "node:assert";
4+
import {
5+
dismissNotifications,
6+
executeCommandWhenAvailable,
7+
getTreeViewItems,
8+
openFile,
9+
waitForInput,
10+
waitForLogin,
11+
waitForNotification,
12+
} from "./utils/commonUtils.ts";
13+
import {
14+
getBasicBundleConfig,
15+
writeRootBundleConfig,
16+
} from "./utils/dabsFixtures.ts";
17+
18+
describe("Run files on serverless compute", async function () {
19+
let projectDir: string;
20+
this.timeout(3 * 60 * 1000);
21+
22+
before(async () => {
23+
assert(process.env.WORKSPACE_PATH, "WORKSPACE_PATH doesn't exist");
24+
25+
projectDir = process.env.WORKSPACE_PATH;
26+
27+
await fs.writeFile(
28+
path.join(projectDir, "requirements.txt"),
29+
["ipykernel", "setuptools"].join("\n")
30+
);
31+
32+
await fs.writeFile(
33+
path.join(projectDir, "lib.py"),
34+
`def func(spark):\treturn spark.sql('SELECT "hello world"')`
35+
);
36+
const nestedDir = path.join(projectDir, "nested");
37+
await fs.mkdir(nestedDir, {recursive: true});
38+
await fs.writeFile(
39+
path.join(nestedDir, "hello.py"),
40+
[
41+
`from lib import func`,
42+
`import os`,
43+
`df = func(spark).toPandas()`,
44+
`df.to_json(os.path.join(os.getcwd(), "file-output.json"))`,
45+
].join("\n")
46+
);
47+
48+
await fs.writeFile(
49+
path.join(nestedDir, "notebook.ipynb"),
50+
JSON.stringify({
51+
/* eslint-disable @typescript-eslint/naming-convention */
52+
cells: [
53+
{
54+
cell_type: "code",
55+
execution_count: null,
56+
metadata: {},
57+
outputs: [],
58+
source: [
59+
`from lib import func`,
60+
`import os`,
61+
`df = func(spark).toPandas()`,
62+
`df.to_json(os.path.join(os.getcwd(), "notebook-output.json"))`,
63+
],
64+
},
65+
],
66+
metadata: {
67+
kernelspec: {
68+
display_name: "Python 3",
69+
language: "python",
70+
name: "python3",
71+
},
72+
orig_nbformat: 4,
73+
},
74+
nbformat: 4,
75+
nbformat_minor: 2,
76+
/* eslint-enable @typescript-eslint/naming-convention */
77+
})
78+
);
79+
80+
await writeRootBundleConfig(
81+
getBasicBundleConfig({}, false),
82+
projectDir
83+
);
84+
});
85+
86+
it("should wait for connection", async () => {
87+
await waitForLogin("DEFAULT");
88+
await dismissNotifications();
89+
const workbench = await driver.getWorkbench();
90+
await workbench.getEditorView().closeAllEditors();
91+
});
92+
93+
it("should prompt to setup virtual environment", async () => {
94+
const subTreeItems = await getTreeViewItems(
95+
"CONFIGURATION",
96+
"Python Environment"
97+
);
98+
let promptFound = false;
99+
for (const item of subTreeItems) {
100+
const label = await item.getLabel();
101+
console.log("Python Environment item label: ", label);
102+
if (label.includes("Activate an environment")) {
103+
promptFound = true;
104+
break;
105+
}
106+
}
107+
assert(promptFound, "Prompt to setup virtual environment not found");
108+
});
109+
110+
it("should select serverless compute", async () => {
111+
await executeCommandWhenAvailable("Databricks: Configure cluster");
112+
const computeInput = await waitForInput();
113+
await computeInput.selectQuickPick("Serverless");
114+
});
115+
116+
it("should setup virtual environment", async () => {
117+
await executeCommandWhenAvailable(
118+
"Databricks: Setup python environment"
119+
);
120+
121+
// Remote runner has miniconda environment preinstalled,
122+
// but we still want to create a local .venv to test the full flow
123+
const selectEnvInput = await waitForInput();
124+
const createNewPick = await selectEnvInput.findQuickPick(
125+
"Create new environment"
126+
);
127+
if (createNewPick) {
128+
console.log("'Create new environment' pick found, selecting");
129+
await createNewPick.select();
130+
} else {
131+
console.log("'Create new environment' pick not found, moving on");
132+
}
133+
134+
// Select Venv as the environment manager
135+
const envTypeInput = await waitForInput();
136+
await envTypeInput.selectQuickPick("Venv");
137+
console.log("Selected Venv as the environment manager");
138+
139+
// Our runners have python 3.12+ preinstalled
140+
const pythonVersionInput = await waitForInput();
141+
await pythonVersionInput.selectQuickPick("Python 3.12");
142+
console.log("Selected Python Version");
143+
144+
// Install dependencies from the requirements.txt
145+
const dependenciesInput = await waitForInput();
146+
await dependenciesInput.toggleAllQuickPicks(true);
147+
await dependenciesInput.confirm();
148+
149+
await waitForNotification("The following environment is selected");
150+
await waitForNotification("Databricks Connect", "Install");
151+
152+
await browser.waitUntil(
153+
async () => {
154+
const workbench = await browser.getWorkbench();
155+
const view = await workbench.getBottomBar().openOutputView();
156+
const outputText = (await view.getText()).join("");
157+
console.log("Output view text: ", outputText);
158+
return (
159+
outputText.includes("Successfully installed") ||
160+
outputText.includes("finished with status 'done'")
161+
);
162+
},
163+
{
164+
timeout: 60_000,
165+
interval: 2000,
166+
timeoutMsg:
167+
"Installation output did not contain 'Successfully installed'",
168+
}
169+
);
170+
171+
// On windows we don't always get a notification after installation (TODO: fix it in the extension code),
172+
// so we need to refresh manually.
173+
await executeCommandWhenAvailable(
174+
"Databricks: Refresh python environment status"
175+
);
176+
177+
await browser.waitUntil(
178+
async () => {
179+
const subTreeItems = await getTreeViewItems(
180+
"CONFIGURATION",
181+
"Python Environment"
182+
);
183+
for (const item of subTreeItems) {
184+
const label = await item.getLabel();
185+
console.log("Python Environment item label: ", label);
186+
if (label.includes("Databricks Connect:")) {
187+
return true;
188+
}
189+
}
190+
return false;
191+
},
192+
{
193+
timeout: 60_000,
194+
interval: 2000,
195+
timeoutMsg: "Setup confirmation failed",
196+
}
197+
);
198+
});
199+
200+
it("should run a python file with dbconnect", async () => {
201+
await openFile("hello.py");
202+
await executeCommandWhenAvailable(
203+
"Databricks: Run current file with Databricks Connect"
204+
);
205+
await browser.waitUntil(
206+
async () => {
207+
const fileOutput = await fs.readFile(
208+
path.join(projectDir, "file-output.json"),
209+
"utf-8"
210+
);
211+
console.log("File output: ", fileOutput);
212+
return fileOutput.includes("hello world");
213+
},
214+
{
215+
timeout: 60_000,
216+
interval: 2000,
217+
timeoutMsg: "Terminal output did not contain 'hello world'",
218+
}
219+
);
220+
});
221+
222+
it("should run a notebook with dbconnect", async () => {
223+
await openFile("notebook.ipynb");
224+
await executeCommandWhenAvailable("Notebook: Run All");
225+
226+
const kernelInput = await waitForInput();
227+
await kernelInput.selectQuickPick("Python Environments...");
228+
console.log(
229+
"Selected 'Python Environments...' option for kernel selection"
230+
);
231+
232+
const envInput = await waitForInput();
233+
await envInput.selectQuickPick(".venv");
234+
console.log("Selected .venv environment");
235+
236+
await browser.waitUntil(
237+
async () => {
238+
const notebookOutput = await fs.readFile(
239+
path.join(projectDir, "nested", "notebook-output.json"),
240+
"utf-8"
241+
);
242+
console.log("Notebook output: ", notebookOutput);
243+
return notebookOutput.includes("hello world");
244+
},
245+
{
246+
timeout: 60_000,
247+
interval: 2000,
248+
timeoutMsg: "Notebook execution did not complete successfully",
249+
}
250+
);
251+
});
252+
});
Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
import path from "node:path";
2+
import * as fs from "fs/promises";
3+
import assert from "node:assert";
4+
import {
5+
dismissNotifications,
6+
executeCommandWhenAvailable,
7+
openFile,
8+
waitForLogin,
9+
waitForWorkflowWebview,
10+
} from "./utils/commonUtils.ts";
11+
import {
12+
getBasicBundleConfig,
13+
writeRootBundleConfig,
14+
} from "./utils/dabsFixtures.ts";
15+
16+
describe("Run files on serverless compute", async function () {
17+
let projectDir: string;
18+
this.timeout(3 * 60 * 1000);
19+
20+
before(async () => {
21+
assert(process.env.WORKSPACE_PATH, "WORKSPACE_PATH doesn't exist");
22+
23+
projectDir = process.env.WORKSPACE_PATH;
24+
25+
await fs.writeFile(
26+
path.join(projectDir, "lib.py"),
27+
[
28+
"def func(spark):",
29+
`\tspark.sql('SELECT "hello world"').show()`,
30+
].join("\n")
31+
);
32+
const nestedDir = path.join(projectDir, "nested");
33+
await fs.mkdir(nestedDir, {recursive: true});
34+
await fs.writeFile(
35+
path.join(nestedDir, "hello.py"),
36+
[`from lib import func`, "func(spark)"].join("\n")
37+
);
38+
39+
await writeRootBundleConfig(
40+
getBasicBundleConfig({}, false),
41+
projectDir
42+
);
43+
});
44+
45+
it("should wait for connection", async () => {
46+
await waitForLogin("DEFAULT");
47+
await dismissNotifications();
48+
});
49+
50+
it("should run a python file as a serverless workflow", async () => {
51+
await openFile("hello.py");
52+
await executeCommandWhenAvailable("Databricks: Run File as Workflow");
53+
// Serverless compute should be selected automatically based on the serverless_compute_id cfg option
54+
await waitForWorkflowWebview(["hello world", "Serverless"]);
55+
});
56+
});

0 commit comments

Comments
 (0)