Merged

Commits (50)
97b92e1
Added Databricks SQL Warehouses API actions
Lokeshchand33 Aug 22, 2025
5a697bd
Update Databricks SQL Warehouse docs URLs
Lokeshchand33 Aug 22, 2025
6623be2
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Aug 23, 2025
8343661
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Aug 27, 2025
9292e93
fix(databricks): bump component versions and apply lint fixes
Lokeshchand33 Aug 28, 2025
6a9646c
fix(databricks): addressed requested changes
Lokeshchand33 Aug 29, 2025
d66788b
addressed coderabbit review feedback
Lokeshchand33 Aug 29, 2025
e120588
resolved the linting issues
Lokeshchand33 Aug 29, 2025
5238430
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Aug 30, 2025
e742ec2
addressed all test failures
Lokeshchand33 Sep 1, 2025
01ed509
addressed coderabbit review feedback
Lokeshchand33 Sep 1, 2025
d83d206
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Sep 1, 2025
49e997c
resolved the linting issues
Lokeshchand33 Sep 1, 2025
0535802
addressed coderabbit review feedback
Lokeshchand33 Sep 1, 2025
b98476c
addressed coderabbit review feedback
Lokeshchand33 Sep 1, 2025
2153ac3
resolved the linting issues
Lokeshchand33 Sep 1, 2025
b04a050
updates
michelle0927 Sep 1, 2025
2222816
Add default value for maxNumClusters
vunguyenhung Sep 2, 2025
2aeacf2
create and edit sql warehouses fixes
Lokeshchand33 Sep 2, 2025
9bfe023
create and edit sql warehouse fixes
Lokeshchand33 Sep 2, 2025
99dfc76
updates
michelle0927 Sep 2, 2025
62287c7
Added Vector Search Index API actions
Lokeshchand33 Sep 4, 2025
ee33ab4
addressed coderabbit review feedback
Lokeshchand33 Sep 4, 2025
25fdea5
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Sep 5, 2025
b7e9fd4
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Sep 8, 2025
a13c04c
version updated
Lokeshchand33 Sep 8, 2025
30e5b6c
resolved the linting issues
Lokeshchand33 Sep 8, 2025
dfb8fd9
Merge branch 'PipedreamHQ:master' into databricks-sql-warehouses
Lokeshchand33 Sep 10, 2025
47fe441
addressed all test failures
lokesh154 Sep 13, 2025
e4a1037
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Sep 13, 2025
9bcacc3
addressed coderabbit review feedback
Lokeshchand33 Sep 13, 2025
13a1c59
addressed coderabbit review feedback
Lokeshchand33 Sep 13, 2025
9ea188a
addressed coderabbit review feedback
Lokeshchand33 Sep 14, 2025
ebea510
updated
Lokeshchand33 Sep 15, 2025
75214fc
updated
Lokeshchand33 Sep 15, 2025
8666764
updates
Lokeshchand33 Sep 15, 2025
30b514d
fixed failed test cases
Lokeshchand33 Sep 16, 2025
86b4d47
updated
Lokeshchand33 Sep 17, 2025
da2fa27
updated
Lokeshchand33 Sep 17, 2025
f9aa39e
updated
Lokeshchand33 Sep 17, 2025
008e23b
fixed failed test cases
Lokeshchand33 Sep 17, 2025
8fb3072
fixed failed test cases
Lokeshchand33 Sep 18, 2025
1258a7c
resolved conflict
Lokeshchand33 Sep 18, 2025
8562f9f
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Sep 18, 2025
427452e
updated
Lokeshchand33 Sep 18, 2025
577233e
version updated
Lokeshchand33 Sep 18, 2025
8df764b
updated
Lokeshchand33 Sep 19, 2025
af3ef3b
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Sep 19, 2025
db3b6cd
updated
Lokeshchand33 Sep 21, 2025
6b54d05
Merge branch 'master' into databricks-sql-warehouses
michelle0927 Sep 23, 2025
@@ -0,0 +1,171 @@
import databricks from "../../databricks.app.mjs";
import constants from "../../common/constants.mjs";
import utils from "../../common/utils.mjs";
import { ConfigurationError } from "@pipedream/platform";

export default {
key: "databricks-create-sql-warehouse",
name: "Create SQL Warehouse",
description: "Creates a new SQL Warehouse in Databricks. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/create)",
version: "0.0.3",
type: "action",
props: {
databricks,
name: {
type: "string",
label: "Warehouse Name",
description: "A human-readable name for the warehouse",
},
clusterSize: {
type: "string",
label: "Cluster Size",
description: "Size of the cluster",
options: constants.CLUSTER_SIZES,
},
autoStopMinutes: {
type: "integer",
label: "Auto Stop (minutes)",
description:
"Minutes of inactivity before auto-stop. 0 disables auto-stop. Must be 0 or ≥ 10.",
optional: true,
default: 10,
},
minNumClusters: {
type: "integer",
label: "Min Number of Clusters",
description: "Minimum number of clusters to maintain (> 0 and ≤ min(max_num_clusters, 30)).",
optional: true,
default: 1,
},
maxNumClusters: {
type: "integer",
label: "Max Number of Clusters",
description: "Maximum number of clusters for autoscaler (≥ min_num_clusters and ≤ 30).",
optional: true,
default: 1,
},
enablePhoton: {
type: "boolean",
label: "Enable Photon",
description: "Whether the warehouse should use Photon optimized clusters.",
optional: true,
},
enableServerlessCompute: {
type: "boolean",
label: "Enable Serverless Compute",
description: "Whether the warehouse should use serverless compute.",
optional: true,
},
warehouseType: {
type: "string",
label: "Warehouse Type",
description:
"Warehouse type: PRO or CLASSIC. Set PRO + enableServerlessCompute = true to use serverless.",
options: [
"TYPE_UNSPECIFIED",
"CLASSIC",
"PRO",
],
optional: true,
},
spotInstancePolicy: {
type: "string",
label: "Spot Instance Policy",
description: "Configures whether the warehouse should use spot instances.",
options: [
"POLICY_UNSPECIFIED",
"COST_OPTIMIZED",
"RELIABILITY_OPTIMIZED",
],
optional: true,
},
channel: {
type: "object",
label: "Channel",
description:
"Channel details. Example: `{ \"name\": \"CHANNEL_NAME_CUSTOM\", \"dbsql_version\": \"2023.35\" }`",
optional: true,
},
tags: {
type: "object",
label: "Tags",
description:
"Custom key-value tags for resources associated with this SQL Warehouse.",
optional: true,
},
instanceProfileArn: {
type: "string",
label: "Instance Profile ARN (Deprecated)",
description: "Deprecated. Instance profile used to pass IAM role to the cluster.",
optional: true,
},
},

async run({ $ }) {
const payload = {
name: this.name,
cluster_size: this.clusterSize,
};

if (this.autoStopMinutes !== undefined) {
if (this.autoStopMinutes !== 0 && this.autoStopMinutes < 10) {
throw new ConfigurationError("autoStopMinutes must be 0 or ≥ 10.");
}
payload.auto_stop_mins = this.autoStopMinutes;
}

const minNumClusters = this.minNumClusters ?? 1;
if (minNumClusters < 1 || minNumClusters > 30) {
throw new ConfigurationError("minNumClusters must be between 1 and 30.");
}
payload.min_num_clusters = minNumClusters;

if (this.maxNumClusters !== undefined) {
if (
this.maxNumClusters < payload.min_num_clusters ||
this.maxNumClusters > 30
) {
throw new ConfigurationError(
`maxNumClusters must be ≥ minNumClusters (${payload.min_num_clusters}) and ≤ 30.`,
);
}
payload.max_num_clusters = this.maxNumClusters;
}

const parsedTags = utils.parseObject(this.tags);
const tagArray = Object.entries(parsedTags).map(([
key,
value,
]) => ({
key,
value,
}));
if (tagArray.length) {
payload.tags = {
custom_tags: tagArray,
};
}

if (this.enablePhoton !== undefined)
payload.enable_photon = this.enablePhoton;
if (this.enableServerlessCompute !== undefined)
payload.enable_serverless_compute = this.enableServerlessCompute;
if (this.warehouseType) payload.warehouse_type = this.warehouseType;
if (this.spotInstancePolicy)
payload.spot_instance_policy = this.spotInstancePolicy;
if (this.channel) payload.channel = utils.parseObject(this.channel);
if (this.instanceProfileArn)
payload.instance_profile_arn = this.instanceProfileArn;

const response = await this.databricks.createSQLWarehouse({
data: payload,
$,
});

$.export(
"$summary",
`Successfully created SQL Warehouse: ${response?.name || this.name}`,
);
return response;
},
};
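For reference, a minimal sketch of the request body this action assembles from the props above. The field names mirror the mapping in run(); the specific values (warehouse name, cluster size, tag) are illustrative assumptions, not defaults enforced by the action:

```js
// Illustrative payload only; the values below are assumptions, not action defaults.
const payload = {
  name: "analytics-warehouse",
  cluster_size: "2X-Small",
  auto_stop_mins: 10,
  min_num_clusters: 1,
  max_num_clusters: 1,
  enable_photon: true,
  enable_serverless_compute: true,
  warehouse_type: "PRO",
  spot_instance_policy: "COST_OPTIMIZED",
  tags: {
    custom_tags: [
      {
        key: "team",
        value: "data",
      },
    ],
  },
};
// The action then sends it via the app method shown above:
// await this.databricks.createSQLWarehouse({ data: payload, $ });
```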
@@ -0,0 +1,120 @@
import databricks from "../../databricks.app.mjs";
import utils from "../../common/utils.mjs";
import { ConfigurationError } from "@pipedream/platform";

export default {
key: "databricks-create-vector-search-index",
name: "Create Vector Search Index",
description:
"Creates a new vector search index in Databricks. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchindexes/createindex)",
version: "0.0.1",
type: "action",
props: {
databricks,
name: {
type: "string",
label: "Index Name",
description:
"A unique name for the index (e.g., `main_catalog.docs.en_wiki_index`).",
},
endpointName: {
type: "string",
label: "Endpoint Name",
description: "The vector search endpoint that will serve the index.",
},
indexType: {
type: "string",
label: "Index Type",
description: "Type of index (`DELTA_SYNC` or `DIRECT_ACCESS`).",
options: ["DELTA_SYNC", "DIRECT_ACCESS"],
},
primaryKey: {
type: "string",
label: "Primary Key",
description: "The primary key column for the index.",
},
sourceTable: {
type: "string",
label: "Source Table",
description:
"The Delta table backing the index (required if `indexType` is `DELTA_SYNC`).",
optional: true,
},
columnsToSync: {
type: "string[]",
label: "Columns to Sync",
description:
"List of columns to sync from the source Delta table. Example: `[\"id\", \"text\"]`",
optional: true,
},
embeddingSourceColumns: {
type: "string[]",
label: "Embedding Source Columns",
description:
"List of embedding source column configs. Each entry should be a JSON object string like `{ \"embedding_model_endpoint_name\": \"e5-small-v2\", \"name\": \"text\" }`",
optional: true,
},
pipelineType: {
type: "string",
label: "Pipeline Type",
description: "Pipeline type for syncing (default: TRIGGERED).",
options: ["TRIGGERED", "CONTINUOUS"],
optional: true,
default: "TRIGGERED",
},
},

async run({ $ }) {
const payload = {
name: this.name,
endpoint_name: this.endpointName,
index_type: this.indexType,
primary_key: this.primaryKey,
};

if (this.indexType === "DELTA_SYNC") {
if (!this.sourceTable) {
throw new ConfigurationError(
"sourceTable is required when indexType is DELTA_SYNC."
);
}

const columnsToSync = utils.parseObject(this.columnsToSync);
const embeddingSourceColumns = utils.parseObject(
this.embeddingSourceColumns
);

if (!Array.isArray(columnsToSync) || !columnsToSync.length) {
throw new ConfigurationError(
"columnsToSync must be a non-empty array for DELTA_SYNC indexes."
);
}
if (
!Array.isArray(embeddingSourceColumns) ||
!embeddingSourceColumns.length
) {
throw new ConfigurationError(
"embeddingSourceColumns must be a non-empty array for DELTA_SYNC indexes."
);
}

payload.delta_sync_index_spec = {
source_table: this.sourceTable,
pipeline_type: this.pipelineType || "TRIGGERED",
columns_to_sync: columnsToSync,
embedding_source_columns: embeddingSourceColumns,
};
}

const response = await this.databricks.createVectorSearchIndex({
data: payload,
$,
});

$.export(
"$summary",
`Successfully created vector search index: ${response?.name || this.name}`
);
return response;
},
};
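Likewise, a sketch of the body run() builds for a DELTA_SYNC index, using the example names from the prop descriptions; the endpoint name and source table are illustrative assumptions:

```js
// Illustrative payload only; endpoint and table names are assumptions.
const payload = {
  name: "main_catalog.docs.en_wiki_index",
  endpoint_name: "docs-search-endpoint",
  index_type: "DELTA_SYNC",
  primary_key: "id",
  delta_sync_index_spec: {
    source_table: "main_catalog.docs.en_wiki",
    pipeline_type: "TRIGGERED",
    columns_to_sync: [ "id", "text" ],
    embedding_source_columns: [
      {
        embedding_model_endpoint_name: "e5-small-v2",
        name: "text",
      },
    ],
  },
};
// await this.databricks.createVectorSearchIndex({ data: payload, $ });
```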
@@ -0,0 +1,30 @@
import databricks from "../../databricks.app.mjs";

export default {
key: "databricks-delete-sql-warehouse",
name: "Delete SQL Warehouse",
description: "Deletes a SQL Warehouse by ID. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/delete)",
version: "0.0.3",
type: "action",
props: {
databricks,
warehouseId: {
description: "The ID of the SQL Warehouse to delete",
propDefinition: [
databricks,
"warehouseId",
],
},
},
async run({ $ }) {
await this.databricks.deleteSQLWarehouse({
warehouseId: this.warehouseId,
$,
});

$.export("$summary", `Successfully deleted SQL Warehouse with ID ${this.warehouseId}`);
return {
success: true,
};
},
};
@@ -0,0 +1,46 @@
import databricks from "../../databricks.app.mjs";

export default {
key: "databricks-delete-data-from-vector-index",
name: "Delete Data from Vector Search Index",
description:
"Deletes rows from a Direct Access vector index by primary-key values. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchindexes/deletedatavectorindex)",
version: "0.0.1",
type: "action",
props: {
databricks,
indexName: {
propDefinition: [
databricks,
"indexName",
],
},
primaryKeys: {
type: "string[]",
label: "Primary Keys",
description:
"Values of the index’s primary key column to delete (e.g. `1`, `2`). These are the values for the column you set as `primary_key` when the index was created.",
Reviewer suggestion (suggested change): replace "index’s" with "index's" in the description above.
},
},
async run({ $ }) {
const keys = (this.primaryKeys || [])
.map((s) => String(s).trim())
.filter(Boolean);

if (!keys.length) {
throw new Error("Please provide at least one primary key to delete.");
}

const response = await this.databricks.deleteVectorSearchData({
indexName: this.indexName,
params: { primary_keys: keys },
$,
});

$.export(
"$summary",
`Requested delete of ${keys.length} row(s) from index "${this.indexName}".`,
);
return response;
},
};
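A quick sketch of how this action normalizes the Primary Keys prop before calling the app method; the index name and key values below are illustrative assumptions:

```js
// Illustrative only: mixed inputs are stringified, trimmed, and blanks dropped.
const primaryKeys = [ " 1 ", 2, "" ]
  .map((s) => String(s).trim())
  .filter(Boolean); // => [ "1", "2" ]

// await this.databricks.deleteVectorSearchData({
//   indexName: "main_catalog.docs.en_wiki_index",
//   params: { primary_keys: primaryKeys },
//   $,
// });
```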
@@ -0,0 +1,27 @@
import databricks from "../../databricks.app.mjs";

export default {
key: "databricks-delete-vector-search-index",
name: "Delete Vector Search Index",
description: "Deletes a vector search index in Databricks. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchindexes/deleteindex)",
version: "0.0.1",
type: "action",
props: {
databricks,
indexName: {
propDefinition: [
databricks,
"indexName",
],
},
},
async run({ $ }) {
const response = await this.databricks.deleteVectorSearchIndex({
indexName: this.indexName,
$,
});

$.export("$summary", `Successfully deleted vector search index: ${this.indexName}`);
return response;
},
};