
Commit 1884cf0

jcortes authored and sergio-eliot-rodriguez committed
Databricks API - Jobs action components (PipedreamHQ#18371)
1 parent ffa140c commit 1884cf0

36 files changed: +1060 −34 lines
components/databricks/actions/cancel-all-runs/cancel-all-runs.mjs

Lines changed: 52 additions & 0 deletions
```js
import app from "../../databricks.app.mjs";

export default {
  key: "databricks-cancel-all-runs",
  name: "Cancel All Runs",
  description: "Cancel all active runs for a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. [See the documentation](https://docs.databricks.com/api/workspace/jobs/cancelallruns)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    // eslint-disable-next-line pipedream/props-label, pipedream/props-description
    info: {
      type: "alert",
      alertType: "info",
      content: "Either a **Job** or **All Queued Runs** must be provided.",
    },
    jobId: {
      optional: true,
      propDefinition: [
        app,
        "jobId",
      ],
    },
    allQueuedRuns: {
      type: "boolean",
      label: "All Queued Runs",
      description: "Optional boolean parameter to cancel all queued runs. If no **Job ID** is provided, all queued runs in the workspace are canceled.",
      optional: true,
    },
  },
  async run({ $ }) {
    const {
      app,
      jobId,
      allQueuedRuns,
    } = this;

    await app.cancelAllRuns({
      $,
      data: {
        job_id: jobId,
        all_queued_runs: allQueuedRuns,
      },
    });

    $.export("$summary", "Successfully initiated cancellation of all runs");

    return {
      success: true,
    };
  },
};
```
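
The `cancelAllRuns` method lives in `databricks.app.mjs`, which this excerpt doesn't show. As a rough sketch only (not the committed app file), Pipedream app files typically wrap the REST API with a `_makeRequest` helper; the auth property names, base URL shape, and Jobs API 2.1 path below are assumptions:

```js
// Hypothetical sketch of the app-file side; not part of this visible diff.
import { axios } from "@pipedream/platform";

export default {
  type: "app",
  app: "databricks",
  methods: {
    _baseUrl() {
      // Assumes the connected account exposes the workspace domain.
      return `https://${this.$auth.domain}.cloud.databricks.com/api/2.1`;
    },
    _makeRequest({ $ = this, path, ...args }) {
      return axios($, {
        url: `${this._baseUrl()}${path}`,
        headers: {
          Authorization: `Bearer ${this.$auth.access_token}`,
        },
        ...args,
      });
    },
    // Jobs API: POST /jobs/runs/cancel-all
    cancelAllRuns(args = {}) {
      return this._makeRequest({
        method: "POST",
        path: "/jobs/runs/cancel-all",
        ...args,
      });
    },
  },
};
```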
components/databricks/actions/cancel-run/cancel-run.mjs

Lines changed: 37 additions & 0 deletions
```js
import app from "../../databricks.app.mjs";

export default {
  key: "databricks-cancel-run",
  name: "Cancel Run",
  description: "Cancel a job run. The run is canceled asynchronously, so it may still be running when this request completes. [See the documentation](https://docs.databricks.com/api/workspace/jobs/cancelrun)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    runId: {
      propDefinition: [
        app,
        "runId",
      ],
    },
  },
  async run({ $ }) {
    const {
      app,
      runId,
    } = this;

    await app.cancelRun({
      $,
      data: {
        run_id: runId,
      },
    });

    $.export("$summary", `Successfully initiated cancellation of run with ID \`${runId}\`.`);

    return {
      success: true,
    };
  },
};
```
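
The `runId` prop is resolved through a `propDefinition` on the app object, which is also outside this excerpt. A plausible shape is sketched below; the `listRuns` helper and the `run_id`/`run_name` fields it maps over are assumptions based on the Jobs API run object, not code from this commit:

```js
// Hypothetical sketch of the runId propDefinition in databricks.app.mjs.
export default {
  type: "app",
  app: "databricks",
  propDefinitions: {
    runId: {
      type: "string",
      label: "Run ID",
      description: "The identifier of the job run.",
      async options({ $ }) {
        // Populate the dropdown from runs in the workspace (assumed helper).
        const { runs = [] } = await this.listRuns({ $ });
        return runs.map(({ run_id: value, run_name: label }) => ({
          label: label || String(value),
          value: String(value),
        }));
      },
    },
  },
  methods: { /* request helpers as sketched above */ },
};
```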

components/databricks/actions/create-endpoint/create-endpoint.mjs

Lines changed: 1 addition & 1 deletion
```diff
@@ -5,7 +5,7 @@ export default {
   key: "databricks-create-endpoint",
   name: "Create Endpoint",
   description: "Create a new vector search endpoint. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchendpoints/createendpoint)",
-  version: "0.0.1",
+  version: "0.0.2",
   type: "action",
   props: {
     databricks,
```
components/databricks/actions/create-job/create-job.mjs

Lines changed: 237 additions & 0 deletions
```js
import app from "../../databricks.app.mjs";
import utils from "../../common/utils.mjs";

export default {
  key: "databricks-create-job",
  name: "Create Job",
  description: "Create a job. [See the documentation](https://docs.databricks.com/api/workspace/jobs/create)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    tasks: {
      type: "string[]",
      label: "Tasks",
      description: `A list of task specifications to be executed by this job. JSON string format. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#tasks) for task specification details.

**Example:**
\`\`\`json
[
  {
    "notebook_task": {
      "notebook_path": "/Workspace/Users/[email protected]/weather_ingest"
    },
    "task_key": "weather_ocean_data"
  }
]
\`\`\`
`,
    },
    name: {
      type: "string",
      label: "Job Name",
      description: "An optional name for the job",
      optional: true,
    },
    tags: {
      type: "object",
      label: "Tags",
      description: "A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags",
      optional: true,
    },
    jobClusters: {
      type: "string[]",
      label: "Job Clusters",
      description: `A list of job cluster specifications that can be shared and reused by tasks of this job. JSON string format. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#job_clusters) for job cluster specification details.

**Example:**
\`\`\`json
[
  {
    "job_cluster_key": "auto_scaling_cluster",
    "new_cluster": {
      "autoscale": {
        "max_workers": 16,
        "min_workers": 2
      },
      "node_type_id": null,
      "spark_conf": {
        "spark.speculation": true
      },
      "spark_version": "7.3.x-scala2.12"
    }
  }
]
\`\`\`
`,
      optional: true,
    },
    emailNotifications: {
      type: "string",
      label: "Email Notifications",
      description: `An optional set of email addresses to notify when runs of this job begin, complete, or when the job is deleted. Specify as a JSON object with keys for each notification type. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#email_notifications) for details on each field.

**Example:**
\`\`\`json
{
  "on_start": ["[email protected]"],
  "on_success": ["[email protected]"],
  "on_failure": ["[email protected]"],
  "on_duration_warning_threshold_exceeded": ["[email protected]"],
  "on_streaming_backlog_exceeded": ["[email protected]"]
}
\`\`\`
`,
      optional: true,
    },
    webhookNotifications: {
      type: "string",
      label: "Webhook Notifications",
      description: `A collection of system notification IDs to notify when runs of this job begin, complete, or encounter specific events. Specify as a JSON object with keys for each notification type. Each key accepts an array of objects with an \`id\` property (system notification ID). A maximum of 3 destinations can be specified for each property.

Supported keys:
- \`on_start\`: Notified when the run starts.
- \`on_success\`: Notified when the run completes successfully.
- \`on_failure\`: Notified when the run fails.
- \`on_duration_warning_threshold_exceeded\`: Notified when the run duration exceeds the specified threshold.
- \`on_streaming_backlog_exceeded\`: Notified when streaming backlog thresholds are exceeded.

[See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#webhook_notifications) for details.

**Example:**
\`\`\`json
{
  "on_success": [
    { "id": "https://eoiqkb8yzox6u2n.m.pipedream.net" }
  ],
  "on_failure": [
    { "id": "https://another-webhook-url.com/notify" }
  ]
}
\`\`\`
`,
      optional: true,
    },
    timeoutSeconds: {
      type: "integer",
      label: "Timeout Seconds",
      description: "An optional timeout applied to each run of this job. The default behavior is to have no timeout",
      optional: true,
    },
    schedule: {
      type: "string",
      label: "Schedule",
      description: `An optional periodic schedule for this job, specified as a JSON object. By default, the job only runs when triggered manually or via the API. The schedule object must include:

- \`quartz_cron_expression\` (**required**): A Cron expression using Quartz syntax that defines when the job runs. [See Cron Trigger details](https://docs.databricks.com/api/workspace/jobs/create#schedule).
- \`timezone_id\` (**required**): A Java timezone ID (e.g., "Europe/London") that determines the timezone for the schedule. [See Java TimeZone details](https://docs.databricks.com/api/workspace/jobs/create#schedule).
- \`pause_status\` (optional): Set to \`"UNPAUSED"\` (default) or \`"PAUSED"\` to control whether the schedule is active.

**Example:**
\`\`\`json
{
  "quartz_cron_expression": "0 0 12 * * ?",
  "timezone_id": "Asia/Ho_Chi_Minh",
  "pause_status": "UNPAUSED"
}
\`\`\`
`,
      optional: true,
    },
    maxConcurrentRuns: {
      type: "integer",
      label: "Max Concurrent Runs",
      description: "An optional maximum allowed number of concurrent runs of the job. Defaults to 1",
      optional: true,
    },
    gitSource: {
      type: "string",
      label: "Git Source",
      description: `An optional specification for a remote Git repository containing the source code used by tasks. Provide as a JSON string.

This enables version-controlled source code for notebook, dbt, Python script, and SQL File tasks. If \`git_source\` is set, these tasks retrieve files from the remote repository by default (can be overridden per task by setting \`source\` to \`WORKSPACE\`). **Note:** dbt and SQL File tasks require \`git_source\` to be defined. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#git_source) for more details.

**Fields:**
- \`git_url\` (**required**): URL of the repository to be cloned (e.g., "https://github.com/databricks/databricks-cli").
- \`git_provider\` (**required**): Service hosting the repository. One of: \`gitHub\`, \`bitbucketCloud\`, \`azureDevOpsServices\`, \`gitHubEnterprise\`, \`bitbucketServer\`, \`gitLab\`, \`gitLabEnterpriseEdition\`, \`awsCodeCommit\`.
- \`git_branch\`: Name of the branch to check out (cannot be used with \`git_tag\` or \`git_commit\`).
- \`git_tag\`: Name of the tag to check out (cannot be used with \`git_branch\` or \`git_commit\`).
- \`git_commit\`: Commit hash to check out (cannot be used with \`git_branch\` or \`git_tag\`).

**Example:**
\`\`\`json
{
  "git_url": "https://github.com/databricks/databricks-cli",
  "git_provider": "gitHub",
  "git_branch": "main"
}
\`\`\`
`,
      optional: true,
    },
    accessControlList: {
      type: "string[]",
      label: "Access Control List",
      description: `A list of permissions to set on the job, specified as a JSON array of objects. Each object can define permissions for a user, group, or service principal.

Each object may include:
- \`user_name\`: Name of the user.
- \`group_name\`: Name of the group.
- \`service_principal_name\`: Application ID of a service principal.
- \`permission_level\`: Permission level. One of: \`CAN_MANAGE\`, \`IS_OWNER\`, \`CAN_MANAGE_RUN\`, \`CAN_VIEW\`.

**Example:**
\`\`\`json
[
  {
    "permission_level": "IS_OWNER",
    "user_name": "[email protected]"
  },
  {
    "permission_level": "CAN_VIEW",
    "group_name": "data-scientists"
  }
]
\`\`\`
[See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#access_control_list) for more details.`,
      optional: true,
    },
  },
  async run({ $ }) {
    const {
      app,
      tasks,
      name,
      tags,
      jobClusters,
      emailNotifications,
      webhookNotifications,
      timeoutSeconds,
      schedule,
      maxConcurrentRuns,
      gitSource,
      accessControlList,
    } = this;

    const response = await app.createJob({
      $,
      data: {
        name,
        tags,
        tasks: utils.parseJsonInput(tasks),
        job_clusters: utils.parseJsonInput(jobClusters),
        email_notifications: utils.parseJsonInput(emailNotifications),
        webhook_notifications: utils.parseJsonInput(webhookNotifications),
        timeout_seconds: timeoutSeconds,
        schedule: utils.parseJsonInput(schedule),
        max_concurrent_runs: maxConcurrentRuns,
        git_source: utils.parseJsonInput(gitSource),
        access_control_list: utils.parseJsonInput(accessControlList),
      },
    });

    $.export("$summary", `Successfully created job with ID \`${response.job_id}\``);

    return response;
  },
};
```
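
The Create Job action funnels every structured prop through `utils.parseJsonInput` from `common/utils.mjs`, which is not part of this excerpt. A helper with that name most plausibly normalizes JSON-string props into parsed values while passing through anything already parsed; a minimal sketch under that assumption:

```js
// common/utils.mjs (sketch) — assumed behavior, not the committed code.
function parseElement(value) {
  if (typeof value !== "string") {
    return value; // already parsed (e.g. an object from a previous step)
  }
  try {
    return JSON.parse(value);
  } catch (err) {
    throw new Error(`Invalid JSON input: ${value}`);
  }
}

export default {
  // Accepts undefined, a JSON string, or an array of JSON strings
  // (matching the "string[]" props above) and returns parsed values.
  parseJsonInput(input) {
    if (input === undefined || input === null) {
      return undefined;
    }
    return Array.isArray(input)
      ? input.map(parseElement)
      : parseElement(input);
  },
};
```

Returning `undefined` for omitted props keeps those keys out of the serialized request body, so optional fields are never sent as `null`.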

components/databricks/actions/create-sql-warehouse/create-sql-warehouse.mjs

Lines changed: 1 addition & 1 deletion
```diff
@@ -7,7 +7,7 @@ export default {
   key: "databricks-create-sql-warehouse",
   name: "Create SQL Warehouse",
   description: "Creates a new SQL Warehouse in Databricks. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/create)",
-  version: "0.0.2",
+  version: "0.0.3",
   type: "action",
   props: {
     databricks,
```

components/databricks/actions/delete-endpoint/delete-endpoint.mjs

Lines changed: 1 addition & 1 deletion
```diff
@@ -4,7 +4,7 @@ export default {
   key: "databricks-delete-endpoint",
   name: "Delete Endpoint",
   description: "Delete a vector search endpoint. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchendpoints/deleteendpoint)",
-  version: "0.0.1",
+  version: "0.0.2",
   type: "action",
   props: {
     databricks,
```
components/databricks/actions/delete-job/delete-job.mjs

Lines changed: 37 additions & 0 deletions
```js
import app from "../../databricks.app.mjs";

export default {
  key: "databricks-delete-job",
  name: "Delete Job",
  description: "Delete a job. Deleted jobs cannot be recovered. [See the documentation](https://docs.databricks.com/api/workspace/jobs/delete)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    jobId: {
      propDefinition: [
        app,
        "jobId",
      ],
    },
  },
  async run({ $ }) {
    const {
      app,
      jobId,
    } = this;

    await app.deleteJob({
      $,
      data: {
        job_id: jobId,
      },
    });

    $.export("$summary", `Successfully deleted job with ID \`${jobId}\`.`);

    return {
      success: true,
    };
  },
};
```
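
Because these actions are plain objects, their `run` methods can be exercised outside the Pipedream runtime by binding `this` manually. A throwaway harness for the Delete Job action above, with the app and `$` context stubbed (the file name and stubs here are illustrative, not part of the commit):

```js
// test-delete-job.mjs — illustrative harness only.
import deleteJob from "./components/databricks/actions/delete-job/delete-job.mjs";

// Stub the app so no real request is made.
const app = {
  deleteJob: async ({ data }) => console.log("would POST /jobs/delete", data),
};

// Minimal stand-in for Pipedream's $ context.
const $ = {
  export: (key, value) => console.log(key, "=", value),
};

// The action destructures `app` and `jobId` from `this` inside run().
const result = await deleteJob.run.call({ app, jobId: 123 }, { $ });
console.log(result); // { success: true }
```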
