-
Notifications
You must be signed in to change notification settings - Fork 5.5k
Databricks API - Jobs action components #18371
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
52 changes: 52 additions & 0 deletions
52
components/databricks/actions/cancel-all-runs/cancel-all-runs.mjs
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,52 @@ | ||
import app from "../../databricks.app.mjs";

export default {
  key: "databricks-cancel-all-runs",
  name: "Cancel All Runs",
  description: "Cancel all active runs for a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. [See the documentation](https://docs.databricks.com/api/workspace/jobs/cancelallruns)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    // eslint-disable-next-line pipedream/props-label, pipedream/props-description
    info: {
      type: "alert",
      alertType: "info",
      content: "Either a **Job** or **All Queued Runs** must be provided.",
    },
    jobId: {
      optional: true,
      propDefinition: [
        app,
        "jobId",
      ],
    },
    allQueuedRuns: {
      type: "boolean",
      label: "All Queued Runs",
      description: "Optional boolean parameter to cancel all queued runs. If no **Job ID** is provided, all queued runs in the workspace are canceled.",
      optional: true,
    },
  },
  /**
   * Cancels all active runs of the selected job (or all queued runs in the
   * workspace). Cancellation is asynchronous on the Databricks side.
   *
   * @param {object} ctx - Pipedream run context; `$` is used for the HTTP
   *   request and for exporting the step summary.
   * @returns {{ success: boolean }} simple success flag (the API returns no body).
   */
  async run({ $ }) {
    const {
      app,
      jobId,
      allQueuedRuns,
    } = this;

    // Enforce what the info alert promises: the Jobs API needs at least one
    // selector. Fail fast with a clear message instead of surfacing an
    // opaque API error. Note `allQueuedRuns === false` still counts as provided.
    if (jobId === undefined && allQueuedRuns === undefined) {
      throw new Error("Either **Job** or **All Queued Runs** must be provided.");
    }

    await app.cancelAllRuns({
      $,
      data: {
        job_id: jobId,
        all_queued_runs: allQueuedRuns,
      },
    });

    $.export("$summary", "Successfully initiated cancellation of all runs");

    return {
      success: true,
    };
  },
};
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,37 @@ | ||
import app from "../../databricks.app.mjs";

export default {
  key: "databricks-cancel-run",
  name: "Cancel Run",
  description: "Cancel a job run. The run is canceled asynchronously, so it may still be running when this request completes. [See the documentation](https://docs.databricks.com/api/workspace/jobs/cancelrun)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    runId: {
      propDefinition: [
        app,
        "runId",
      ],
    },
  },
  /**
   * Requests cancellation of the selected job run. The Databricks API cancels
   * asynchronously, so the run may still be active when this step finishes.
   *
   * @param {object} ctx - Pipedream run context providing `$`.
   * @returns {{ success: boolean }} simple success flag (the API returns no body).
   */
  async run({ $ }) {
    await this.app.cancelRun({
      $,
      data: {
        run_id: this.runId,
      },
    });

    $.export("$summary", `Successfully initiated cancellation of run with ID \`${this.runId}\`.`);

    return {
      success: true,
    };
  },
};
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
237 changes: 237 additions & 0 deletions
237
components/databricks/actions/create-job/create-job.mjs
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,237 @@ | ||
import app from "../../databricks.app.mjs";
import utils from "../../common/utils.mjs";

export default {
  key: "databricks-create-job",
  name: "Create Job",
  description: "Create a job. [See the documentation](https://docs.databricks.com/api/workspace/jobs/create)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    tasks: {
      type: "string[]",
      label: "Tasks",
      description: `A list of task specifications to be executed by this job. JSON string format. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#tasks) for task specification details.

**Example:**
\`\`\`json
[
  {
    "notebook_task": {
      "notebook_path": "/Workspace/Users/[email protected]/weather_ingest"
    },
    "task_key": "weather_ocean_data"
  }
]
\`\`\`
`,
    },
    name: {
      type: "string",
      label: "Job Name",
      description: "An optional name for the job",
      optional: true,
    },
    tags: {
      type: "object",
      label: "Tags",
      description: "A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs clusters, and are subject to the same limitations as cluster tags",
      optional: true,
    },
    jobClusters: {
      type: "string[]",
      label: "Job Clusters",
      description: `A list of job cluster specifications that can be shared and reused by tasks of this job. JSON string format. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#job_clusters) for job cluster specification details.

**Example:**
\`\`\`json
[
  {
    "job_cluster_key": "auto_scaling_cluster",
    "new_cluster": {
      "autoscale": {
        "max_workers": 16,
        "min_workers": 2
      },
      "node_type_id": null,
      "spark_conf": {
        "spark.speculation": true
      },
      "spark_version": "7.3.x-scala2.12"
    }
  }
]
\`\`\`
`,
      optional: true,
    },
    emailNotifications: {
      type: "string",
      label: "Email Notifications",
      description: `An optional set of email addresses to notify when runs of this job begin, complete, or when the job is deleted. Specify as a JSON object with keys for each notification type. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#email_notifications) for details on each field.

**Example:**
\`\`\`json
{
  "on_start": ["[email protected]"],
  "on_success": ["[email protected]"],
  "on_failure": ["[email protected]"],
  "on_duration_warning_threshold_exceeded": ["[email protected]"],
  "on_streaming_backlog_exceeded": ["[email protected]"]
}
\`\`\`
`,
      optional: true,
    },
    webhookNotifications: {
      type: "string",
      label: "Webhook Notifications",
      description: `A collection of system notification IDs to notify when runs of this job begin, complete, or encounter specific events. Specify as a JSON object with keys for each notification type. Each key accepts an array of objects with an \`id\` property (system notification ID). A maximum of 3 destinations can be specified for each property.

Supported keys:
- \`on_start\`: Notified when the run starts.
- \`on_success\`: Notified when the run completes successfully.
- \`on_failure\`: Notified when the run fails.
- \`on_duration_warning_threshold_exceeded\`: Notified when the run duration exceeds the specified threshold.
- \`on_streaming_backlog_exceeded\`: Notified when streaming backlog thresholds are exceeded.

[See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#webhook_notifications) for details.

**Example:**
\`\`\`json
{
  "on_success": [
    { "id": "https://eoiqkb8yzox6u2n.m.pipedream.net" }
  ],
  "on_failure": [
    { "id": "https://another-webhook-url.com/notify" }
  ]
}
\`\`\`
`,
      optional: true,
    },
    timeoutSeconds: {
      type: "integer",
      label: "Timeout Seconds",
      description: "An optional timeout applied to each run of this job. The default behavior is to have no timeout",
      optional: true,
    },
    schedule: {
      type: "string",
      label: "Schedule",
      description: `An optional periodic schedule for this job, specified as a JSON object. By default, the job only runs when triggered manually or via the API. The schedule object must include:

- \`quartz_cron_expression\` (**required**): A Cron expression using Quartz syntax that defines when the job runs. [See Cron Trigger details](https://docs.databricks.com/api/workspace/jobs/create#schedule).
- \`timezone_id\` (**required**): A Java timezone ID (e.g., "Europe/London") that determines the timezone for the schedule. [See Java TimeZone details](https://docs.databricks.com/api/workspace/jobs/create#schedule).
- \`pause_status\` (optional): Set to \`"UNPAUSED"\` (default) or \`"PAUSED"\` to control whether the schedule is active.

**Example:**
\`\`\`json
{
  "quartz_cron_expression": "0 0 12 * * ?",
  "timezone_id": "Asia/Ho_Chi_Minh",
  "pause_status": "UNPAUSED"
}
\`\`\`
`,
      optional: true,
    },
    maxConcurrentRuns: {
      type: "integer",
      label: "Max Concurrent Runs",
      description: "An optional maximum allowed number of concurrent runs of the job. Defaults to 1",
      optional: true,
    },
    gitSource: {
      type: "string",
      label: "Git Source",
      description: `An optional specification for a remote Git repository containing the source code used by tasks. Provide as a JSON string.

This enables version-controlled source code for notebook, dbt, Python script, and SQL File tasks. If \`git_source\` is set, these tasks retrieve files from the remote repository by default (can be overridden per task by setting \`source\` to \`WORKSPACE\`). **Note:** dbt and SQL File tasks require \`git_source\` to be defined. [See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#git_source) for more details.

**Fields:**
- \`git_url\` (**required**): URL of the repository to be cloned (e.g., "https://github.com/databricks/databricks-cli").
- \`git_provider\` (**required**): Service hosting the repository. One of: \`gitHub\`, \`bitbucketCloud\`, \`azureDevOpsServices\`, \`gitHubEnterprise\`, \`bitbucketServer\`, \`gitLab\`, \`gitLabEnterpriseEdition\`, \`awsCodeCommit\`.
- \`git_branch\`: Name of the branch to check out (cannot be used with \`git_tag\` or \`git_commit\`).
- \`git_tag\`: Name of the tag to check out (cannot be used with \`git_branch\` or \`git_commit\`).
- \`git_commit\`: Commit hash to check out (cannot be used with \`git_branch\` or \`git_tag\`).

**Example:**
\`\`\`json
{
  "git_url": "https://github.com/databricks/databricks-cli",
  "git_provider": "gitHub",
  "git_branch": "main"
}
\`\`\`
`,
      optional: true,
    },
    accessControlList: {
      type: "string[]",
      label: "Access Control List",
      description: `A list of permissions to set on the job, specified as a JSON array of objects. Each object can define permissions for a user, group, or service principal.

Each object may include:
- \`user_name\`: Name of the user.
- \`group_name\`: Name of the group.
- \`service_principal_name\`: Application ID of a service principal.
- \`permission_level\`: Permission level. One of: \`CAN_MANAGE\`, \`IS_OWNER\`, \`CAN_MANAGE_RUN\`, \`CAN_VIEW\`.

**Example:**
\`\`\`json
[
  {
    "permission_level": "IS_OWNER",
    "user_name": "[email protected]"
  },
  {
    "permission_level": "CAN_VIEW",
    "group_name": "data-scientists"
  }
]
\`\`\`
[See the API documentation](https://docs.databricks.com/api/workspace/jobs/create#access_control_list) for more details.`,
      optional: true,
    },
  },
  /**
   * Creates a Databricks job from the configured props.
   *
   * Free-form JSON props arrive as strings (or arrays of strings); they are
   * run through `utils.parseJsonInput` to become real objects/arrays before
   * the payload is sent.
   *
   * @param {object} ctx - Pipedream run context providing `$`.
   * @returns {object} the raw API response (includes `job_id`).
   */
  async run({ $ }) {
    const { app } = this;

    // Assemble the request body using the API's snake_case field names.
    const data = {
      name: this.name,
      tags: this.tags,
      tasks: utils.parseJsonInput(this.tasks),
      job_clusters: utils.parseJsonInput(this.jobClusters),
      email_notifications: utils.parseJsonInput(this.emailNotifications),
      webhook_notifications: utils.parseJsonInput(this.webhookNotifications),
      timeout_seconds: this.timeoutSeconds,
      schedule: utils.parseJsonInput(this.schedule),
      max_concurrent_runs: this.maxConcurrentRuns,
      git_source: utils.parseJsonInput(this.gitSource),
      access_control_list: utils.parseJsonInput(this.accessControlList),
    };

    const response = await app.createJob({
      $,
      data,
    });

    $.export("$summary", `Successfully created job with ID \`${response.job_id}\``);

    return response;
  },
};
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,37 @@ | ||
import app from "../../databricks.app.mjs";

export default {
  key: "databricks-delete-job",
  name: "Delete Job",
  description: "Delete a job. Deleted jobs cannot be recovered. [See the documentation](https://docs.databricks.com/api/workspace/jobs/delete)",
  version: "0.0.1",
  type: "action",
  props: {
    app,
    jobId: {
      propDefinition: [
        app,
        "jobId",
      ],
    },
  },
  /**
   * Permanently deletes the selected job. Deleted jobs cannot be recovered.
   *
   * @param {object} ctx - Pipedream run context providing `$`.
   * @returns {{ success: boolean }} simple success flag (the API returns no body).
   */
  async run({ $ }) {
    const { jobId } = this;

    await this.app.deleteJob({
      $,
      data: {
        job_id: jobId,
      },
    });

    $.export("$summary", `Successfully deleted job with ID \`${jobId}\`.`);

    return {
      success: true,
    };
  },
};
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
Uh oh!
There was an error while loading. Please reload this page.