Commit 6d5cb1d

Add volumes and clean room notebooks
1 parent f3923da commit 6d5cb1d

2 files changed: 53 additions & 20 deletions


bundle/config/mutator/apply_presets_catalog_schema.go

Lines changed: 13 additions & 0 deletions
@@ -211,6 +211,19 @@ func (m *applyPresetsCatalogSchema) Apply(ctx context.Context, b *bundle.Bundle)
 		}
 	}
 
+	// Volumes
+	for _, v := range r.Volumes {
+		if v.CreateVolumeRequestContent == nil {
+			continue
+		}
+		if v.CatalogName == "" {
+			v.CatalogName = p.Catalog
+		}
+		if v.SchemaName == "" {
+			v.SchemaName = p.Schema
+		}
+	}
+
 	return diags
 }
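The loop mutates volumes in place: r.Volumes is a map of pointers (map[string]*resources.Volume, as the test file below shows), and a preset is applied only when the corresponding field is still empty, so values set explicitly in the bundle configuration win. A minimal runnable sketch of that pattern, using stand-in Volume and Presets types rather than the real resources.Volume and config.Presets structs:

package main

import "fmt"

// Stand-ins for the real structs; the CLI uses resources.Volume
// (wrapping catalog.CreateVolumeRequestContent) and config.Presets.
type Volume struct {
	CatalogName string
	SchemaName  string
}

type Presets struct {
	Catalog string
	Schema  string
}

func main() {
	p := Presets{Catalog: "my_catalog", Schema: "my_schema"}

	// A map of pointers: mutating v inside the loop updates the
	// entries in place, exactly like the loop over r.Volumes.
	volumes := map[string]*Volume{
		"raw":     {},                             // empty fields: presets fill both
		"curated": {CatalogName: "other_catalog"}, // explicit value wins
	}

	for _, v := range volumes {
		if v.CatalogName == "" {
			v.CatalogName = p.Catalog
		}
		if v.SchemaName == "" {
			v.SchemaName = p.Schema
		}
	}

	fmt.Println(volumes["raw"], volumes["curated"])
	// Output: &{my_catalog my_schema} &{other_catalog my_schema}
}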

bundle/config/mutator/apply_presets_catalog_schema_test.go

Lines changed: 40 additions & 20 deletions
@@ -3,6 +3,7 @@ package mutator_test
 import (
 	"context"
 	"reflect"
+	"regexp"
 	"strings"
 	"testing"

@@ -144,6 +145,14 @@ func mockPresetsCatalogSchema() *bundle.Bundle {
 					},
 				},
 			},
+			Volumes: map[string]*resources.Volume{
+				"key": {
+					CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
+						CatalogName: "<catalog>",
+						SchemaName:  "<schema>",
+					},
+				},
+			},
 		},
 		Presets: config.Presets{
 			Catalog: "my_catalog",
@@ -155,23 +164,24 @@ func mockPresetsCatalogSchema() *bundle.Bundle {
 
 // ignoredFields are fields that should be ignored in the completeness check
 var ignoredFields = map[string]string{
-	"resources.pipelines.key.schema": "schema is still in private preview",
-	"resources.jobs.key.tasks[0].notebook_task.base_parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].python_wheel_task.named_parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].python_wheel_task.parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.job_parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].spark_jar_task.parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].spark_python_task.parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].spark_submit_task.parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].sql_task.parameters": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.jar_params": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.notebook_params": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.pipeline_params": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.python_named_params": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.python_params": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.spark_submit_params": "catalog/schema are passed via job parameters",
-	"resources.jobs.key.tasks[0].run_job_task.sql_params": "catalog/schema are passed via job parameters",
-	"resources.pipelines.key.ingestion_definition.objects[0].schema": "schema name is under schema.source_schema/destination_schema",
+	"resources.pipelines.key.schema": "schema is still in private preview",
+	"resources.jobs.key.tasks[0].notebook_task.base_parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].python_wheel_task.named_parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].python_wheel_task.parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.job_parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].spark_jar_task.parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].spark_python_task.parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].spark_submit_task.parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].sql_task.parameters": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.jar_params": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.notebook_params": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.pipeline_params": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.python_named_params": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.python_params": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.spark_submit_params": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].run_job_task.sql_params": "catalog/schema are passed via job parameters",
+	"resources.jobs.key.tasks[0].clean_rooms_notebook_task.notebook_base_parameters": "catalog/schema are properties inside this struct",
+	"resources.pipelines.key.ingestion_definition.objects[0].schema": "schema name is under schema.source_schema/destination_schema",
 	"resources.schemas": "schema name of schemas is under resources.schemas.key.Name",
 }

@@ -236,11 +246,21 @@ func TestApplyPresetsCatalogSchemaCompleteness(t *testing.T) {
 
 	// Convert the recordedFields to a set for easier lookup
 	recordedPaths := make(map[string]struct{})
+	arrayIndexPattern := regexp.MustCompile(`\[\d+\]`)
 	for _, field := range recordedFields {
 		recordedPaths[field.PathString] = struct{}{}
-		if i := strings.Index(field.PathString, "["); i >= 0 {
-			// For entries like resources.jobs.key.parameters[1].default, just add resources.jobs.key.parameters
-			recordedPaths[field.PathString[:i]] = struct{}{}
+
+		// Add base paths for any array indices in the path.
+		// For example, for resources.jobs.key.parameters[0].default we add "resources.jobs.key.parameters".
+		path := field.PathString
+		path = arrayIndexPattern.ReplaceAllString(path, "[0]")
+		for {
+			i := strings.Index(path, "[")
+			if i < 0 {
+				break
+			}
+			recordedPaths[path[:i]] = struct{}{}
+			path = path[i+1:]
 		}
 	}