
Commit c68123f

Merge branch 'master' into token-metrics-v3-upgrade
2 parents: 6e48a04 + 946cafd

File tree: 43 files changed (+749, −34 lines)


components/databricks/actions/cancel-all-runs/cancel-all-runs.mjs

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ export default {
   key: "databricks-cancel-all-runs",
   name: "Cancel All Runs",
   description: "Cancel all active runs for a job. The runs are canceled asynchronously, so it doesn't prevent new runs from being started. [See the documentation](https://docs.databricks.com/api/workspace/jobs/cancelallruns)",
-  version: "0.0.1",
+  version: "0.0.2",
   type: "action",
   props: {
     app,

components/databricks/actions/cancel-run/cancel-run.mjs

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ export default {
   key: "databricks-cancel-run",
   name: "Cancel Run",
   description: "Cancel a job run. The run is canceled asynchronously, so it may still be running when this request completes. [See the documentation](https://docs.databricks.com/api/workspace/jobs/cancelrun)",
-  version: "0.0.1",
+  version: "0.0.2",
   type: "action",
   props: {
     app,

components/databricks/actions/create-endpoint/create-endpoint.mjs

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ export default {
   key: "databricks-create-endpoint",
   name: "Create Endpoint",
   description: "Create a new vector search endpoint. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchendpoints/createendpoint)",
-  version: "0.0.2",
+  version: "0.0.3",
   type: "action",
   props: {
     databricks,

components/databricks/actions/create-job/create-job.mjs

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ export default {
   key: "databricks-create-job",
   name: "Create Job",
   description: "Create a job. [See the documentation](https://docs.databricks.com/api/workspace/jobs/create)",
-  version: "0.0.1",
+  version: "0.0.2",
   type: "action",
   props: {
     app,

components/databricks/actions/create-sql-warehouse/create-sql-warehouse.mjs

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ export default {
   key: "databricks-create-sql-warehouse",
   name: "Create SQL Warehouse",
   description: "Creates a new SQL Warehouse in Databricks. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/create)",
-  version: "0.0.3",
+  version: "0.0.4",
   type: "action",
   props: {
     databricks,
components/databricks/actions/create-vector-search-index/create-vector-search-index.mjs

Lines changed: 163 additions & 0 deletions
@@ -0,0 +1,163 @@
+import databricks from "../../databricks.app.mjs";
+import utils from "../../common/utils.mjs";
+import { ConfigurationError } from "@pipedream/platform";
+
+export default {
+  key: "databricks-create-vector-search-index",
+  name: "Create Vector Search Index",
+  description:
+    "Creates a new vector search index in Databricks. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchindexes/createindex)",
+  version: "0.0.1",
+  type: "action",
+  props: {
+    databricks,
+    name: {
+      type: "string",
+      label: "Index Name",
+      description:
+        "A unique name for the index (e.g., `main_catalog.docs.en_wiki_index`).",
+    },
+    endpointName: {
+      propDefinition: [
+        databricks,
+        "endpointName",
+      ],
+    },
+    indexType: {
+      type: "string",
+      label: "Index Type",
+      description: "Type of index (`DELTA_SYNC` or `DIRECT_ACCESS`).",
+      options: [
+        "DELTA_SYNC",
+        "DIRECT_ACCESS",
+      ],
+    },
+    primaryKey: {
+      type: "string",
+      label: "Primary Key",
+      description: "The primary key column for the index.",
+    },
+    sourceTable: {
+      type: "string",
+      label: "Source Table",
+      description:
+        "The Delta table backing the index (required for `DELTA_SYNC`).",
+      optional: true,
+    },
+    columnsToSync: {
+      type: "string[]",
+      label: "Columns to Sync",
+      description:
+        "List of columns to sync from the source Delta table. Example: `[\"id\", \"text\"]` (required for `DELTA_SYNC`).",
+      optional: true,
+    },
+    embeddingSourceColumns: {
+      type: "string[]",
+      label: "Embedding Source Columns",
+      description:
+        "List of embedding source column configs. Each entry is a JSON object string like `{ \"embedding_model_endpoint_name\": \"e5-small-v2\", \"name\": \"text\" }`. Provide when Databricks computes embeddings (DELTA_SYNC).",
+      optional: true,
+    },
+    schemaJson: {
+      type: "string",
+      label: "Schema JSON",
+      description:
+        "The schema of the index in JSON format. Example: `{ \"columns\": [{ \"name\": \"id\", \"type\": \"string\" }, { \"name\": \"text_vector\", \"type\": \"array<double>\" }] }`. Required for `DIRECT_ACCESS` indexes.",
+      optional: true,
+    },
+    pipelineType: {
+      type: "string",
+      label: "Pipeline Type",
+      description: "Pipeline type for syncing (default: TRIGGERED).",
+      options: [
+        "TRIGGERED",
+        "CONTINUOUS",
+      ],
+      optional: true,
+      default: "TRIGGERED",
+    },
+  },
+
+  async run({ $ }) {
+    const payload = {
+      name: this.name,
+      endpoint_name: this.endpointName,
+      index_type: this.indexType,
+      primary_key: this.primaryKey,
+    };
+
+    if (this.indexType === "DELTA_SYNC") {
+      if (this.schemaJson) {
+        throw new ConfigurationError(
+          "`Schema JSON` is not allowed when indexType is DELTA_SYNC.",
+        );
+      }
+      if (!this.sourceTable) {
+        throw new ConfigurationError(
+          "sourceTable is required when indexType is DELTA_SYNC.",
+        );
+      }
+
+      const columnsToSync = Array.isArray(this.columnsToSync)
+        ? this.columnsToSync
+        : utils.parseObject(this.columnsToSync);
+
+      const embeddingSourceColumns = utils.parseObject(this.embeddingSourceColumns);
+      const hasSource = Array.isArray(embeddingSourceColumns) && embeddingSourceColumns.length > 0;
+      if (!hasSource) {
+        throw new ConfigurationError(
+          "embeddingSourceColumns is required when indexType is DELTA_SYNC.",
+        );
+      }
+
+      const deltaSpec = {
+        source_table: this.sourceTable,
+        pipeline_type: this.pipelineType || "TRIGGERED",
+      };
+      if (Array.isArray(columnsToSync) && columnsToSync.length > 0) {
+        deltaSpec.columns_to_sync = columnsToSync;
+      }
+      if (hasSource) {
+        for (const [
+          i,
+          c,
+        ] of embeddingSourceColumns.entries()) {
+          if (!c?.name || !c?.embedding_model_endpoint_name) {
+            throw new ConfigurationError(
+              `embeddingSourceColumns[${i}] must include "name" and "embedding_model_endpoint_name"`,
+            );
+          }
+        }
+        deltaSpec.embedding_source_columns = embeddingSourceColumns;
+      }
+      payload.delta_sync_index_spec = deltaSpec;
+    }
+
+    else if (this.indexType === "DIRECT_ACCESS") {
+      if (this.sourceTable || this.columnsToSync?.length || this.embeddingSourceColumns?.length) {
+        throw new ConfigurationError(
+          "`Source Table`, `Embedding Source Columns` and `Columns to Sync` are not allowed when indexType is DIRECT_ACCESS.",
+        );
+      }
+      if (!this.schemaJson) {
+        throw new ConfigurationError(
+          "schemaJson is required when indexType is DIRECT_ACCESS.",
+        );
+      }
+      payload.direct_access_index_spec = {
+        schema_json: this.schemaJson,
+      };
+    }
+
+    const response = await this.databricks.createVectorSearchIndex({
+      data: payload,
+      $,
+    });
+
+    $.export(
+      "$summary",
+      `Successfully created vector search index: ${response?.name || this.name}`,
+    );
+    return response;
+  },
+};
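
For orientation, below is a minimal sketch of the request body this new action assembles before calling this.databricks.createVectorSearchIndex() for each index type. The index names, endpoint name, source table, and embedding model are hypothetical placeholders, not values taken from this commit; the field names mirror the payload built in run() above.

// Hypothetical example payloads for the two supported index types.

// DELTA_SYNC: Databricks syncs from a Delta table and computes embeddings
// from the listed source column(s).
const deltaSyncPayload = {
  name: "main_catalog.docs.en_wiki_index",      // placeholder index name
  endpoint_name: "my-vector-search-endpoint",   // placeholder endpoint
  index_type: "DELTA_SYNC",
  primary_key: "id",
  delta_sync_index_spec: {
    source_table: "main_catalog.docs.en_wiki",  // placeholder Delta table
    pipeline_type: "TRIGGERED",
    columns_to_sync: ["id", "text"],
    embedding_source_columns: [
      { name: "text", embedding_model_endpoint_name: "e5-small-v2" },
    ],
  },
};

// DIRECT_ACCESS: the caller defines the schema (including the vector column)
// and writes vectors to the index directly.
const directAccessPayload = {
  name: "main_catalog.docs.direct_index",       // placeholder index name
  endpoint_name: "my-vector-search-endpoint",
  index_type: "DIRECT_ACCESS",
  primary_key: "id",
  direct_access_index_spec: {
    schema_json: JSON.stringify({
      columns: [
        { name: "id", type: "string" },
        { name: "text_vector", type: "array<double>" },
      ],
    }),
  },
};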

components/databricks/actions/delete-endpoint/delete-endpoint.mjs

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ export default {
   key: "databricks-delete-endpoint",
   name: "Delete Endpoint",
   description: "Delete a vector search endpoint. [See the documentation](https://docs.databricks.com/api/workspace/vectorsearchendpoints/deleteendpoint)",
-  version: "0.0.2",
+  version: "0.0.3",
   type: "action",
   props: {
     databricks,

components/databricks/actions/delete-job/delete-job.mjs

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ export default {
   key: "databricks-delete-job",
   name: "Delete Job",
   description: "Delete a job. Deleted jobs cannot be recovered. [See the documentation](https://docs.databricks.com/api/workspace/jobs/delete)",
-  version: "0.0.1",
+  version: "0.0.2",
   type: "action",
   props: {
     app,

components/databricks/actions/delete-run/delete-run.mjs

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ export default {
   key: "databricks-delete-run",
   name: "Delete Run",
   description: "Delete a non-active run. Returns an error if the run is active. [See the documentation](https://docs.databricks.com/api/workspace/jobs/deleterun)",
-  version: "0.0.1",
+  version: "0.0.2",
   type: "action",
   props: {
     app,

components/databricks/actions/delete-sql-warehouse/delete-sql-warehouse.mjs

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ export default {
   key: "databricks-delete-sql-warehouse",
   name: "Delete SQL Warehouse",
   description: "Deletes a SQL Warehouse by ID. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/delete)",
-  version: "0.0.3",
+  version: "0.0.4",
   type: "action",
   props: {
     databricks,
