Commit ed687f5

Revert "presets-catalog-schema-as-params"
This reverts commit be08585.
1 parent be08585 commit ed687f5

9 files changed: +216 −41 lines changed

libs/template/templates/dbt-sql/databricks_template_schema.json

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@
       "default": "default",
       "pattern": "^\\w+$",
       "pattern_match_failure_message": "Invalid schema name.",
-      "description": "\nPlease provide an initial schema during development.\ndefault_schema",
+      "description": "\nPlease provide a default schema during development.\ndefault_schema",
       "order": 5
     }
   },

libs/template/templates/default-python/databricks_template_schema.json

Lines changed: 37 additions & 5 deletions
@@ -4,7 +4,7 @@
     "project_name": {
       "type": "string",
       "default": "my_project",
-      "description": "Please provide the following details to tailor the template to your preferences.\n\nUnique name for this project",
+      "description": "\nPlease provide a unique name for this project.\nproject_name",
       "order": 1,
       "pattern": "^[A-Za-z0-9_]+$",
       "pattern_match_failure_message": "Name must consist of letters, numbers, and underscores."
@@ -13,23 +13,55 @@
       "type": "string",
       "default": "yes",
       "enum": ["yes", "no"],
-      "description": "Include a stub (sample) notebook in '{{.project_name}}{{path_separator}}src'",
+      "description": "\nWould you like to include a stub (sample) notebook in '{{.project_name}}{{path_separator}}src'?",
       "order": 2
     },
     "include_dlt": {
       "type": "string",
       "default": "yes",
       "enum": ["yes", "no"],
-      "description": "Include a stub (sample) Delta Live Tables pipeline in '{{.project_name}}{{path_separator}}src'",
+      "description": "Would you like to include a stub (sample) Delta Live Tables pipeline in '{{.project_name}}{{path_separator}}src'?",
       "order": 3
     },
     "include_python": {
       "type": "string",
       "default": "yes",
       "enum": ["yes", "no"],
-      "description": "Include a stub (sample) Python package in '{{.project_name}}{{path_separator}}src'",
+      "description": "Would you like to include a stub (sample) Python package in '{{.project_name}}{{path_separator}}src'?",
       "order": 4
+    },
+    "default_catalog": {
+      "type": "string",
+      "default": "{{default_catalog}}",
+      "pattern": "^\\w*$",
+      "pattern_match_failure_message": "Invalid catalog name.",
+      "description": "\nPlease provide an initial catalog{{if eq (default_catalog) \"\"}} (leave blank when not using Unity Catalog){{end}}.\ndefault_catalog",
+      "order": 5
+    },
+    "personal_schemas": {
+      "type": "string",
+      "description": "\nWould you like to use a personal schema for each user working on this project? (e.g., 'catalog.{{short_name}}')\npersonal_schemas",
+      "enum": [
+        "yes, use a schema based on the current user name during development",
+        "no, use a shared schema during development"
+      ],
+      "order": 6
+    },
+    "shared_schema": {
+      "skip_prompt_if": {
+        "properties": {
+          "personal_schemas": {
+            "const": "yes, use a schema based on the current user name during development"
+          }
+        }
+      },
+      "type": "string",
+      "default": "default",
+      "pattern": "^\\w+$",
+      "pattern_match_failure_message": "Invalid schema name.",
+      "description": "\nPlease provide default schema during development.\ndefault_schema",
+      "order": 7
     }
   },
-  "success_message": "Workspace to use (auto-detected, edit in '{{.project_name}}/databricks.yml'): {{workspace_host}}\n\n✨ Your new project has been created in the '{{.project_name}}' directory!\n\nPlease refer to the README.md file for \"getting started\" instructions.\nSee also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html."
+  "success_message": "\nWorkspace to use (auto-detected, edit in '{{.project_name}}/databricks.yml').\nworkspace_host: {{workspace_host}}\n\n✨ Your new project has been created in the '{{.project_name}}' directory!\n\nPlease refer to the README.md file for \"getting started\" instructions.\nSee also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html."
 }

libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl

Lines changed: 13 additions & 2 deletions
@@ -6,6 +6,13 @@ bundle:
 include:
   - resources/*.yml
 
+{{- $dev_schema := .shared_schema }}
+{{- $prod_schema := .shared_schema }}
+{{- if (regexp "^yes").MatchString .personal_schemas}}
+{{- $dev_schema = "${workspace.current_user.short_name}"}}
+{{- $prod_schema = "default"}}
+{{- end}}
+
 targets:
   dev:
     # The default target uses 'mode: development' to create a development copy.
@@ -16,6 +23,9 @@ targets:
     default: true
     workspace:
       host: {{workspace_host}}
+    presets:
+      catalog: {{.default_catalog}}
+      schema: {{$dev_schema}}
 
   prod:
     mode: production
@@ -26,5 +36,6 @@ targets:
     permissions:
       - {{if is_service_principal}}service_principal{{else}}user{{end}}_name: {{user_name}}
         level: CAN_MANAGE
-    run_as:
-      {{if is_service_principal}}service_principal{{else}}user{{end}}_name: {{user_name}}
+    presets:
+      catalog: {{.default_catalog}}
+      schema: {{$prod_schema}}

libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}.job.yml.tmpl

Lines changed: 1 addition & 5 deletions
@@ -16,16 +16,12 @@ resources:
           interval: 1
           unit: DAYS
 
-      {{- if not is_service_principal}}
-
+      {{if not is_service_principal -}}
       email_notifications:
         on_failure:
           - {{user_name}}
 
-      {{else}}
-
       {{end -}}
-
       tasks:
         {{- if eq .include_notebook "yes" }}
         - task_key: notebook_task

libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}.pipeline.yml.tmpl

Lines changed: 0 additions & 7 deletions
@@ -3,13 +3,6 @@ resources:
   pipelines:
     {{.project_name}}_pipeline:
       name: {{.project_name}}_pipeline
-      {{- if or (eq default_catalog "") (eq default_catalog "hive_metastore")}}
-      ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
-      # catalog: catalog_name
-      {{- else}}
-      catalog: {{default_catalog}}
-      {{- end}}
-      target: {{.project_name}}_${bundle.target}
       libraries:
         - notebook:
             path: ../src/dlt_pipeline.ipynb

libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb.tmpl

Lines changed: 72 additions & 7 deletions
@@ -27,15 +27,25 @@
    },
    "outputs": [],
    "source": [
-{{- if (eq .include_python "yes") }}
+{{- if (eq .include_python "yes") }}
     "import sys\n",
     "sys.path.append('../src')\n",
     "from {{.project_name}} import main\n",
     "\n",
-     "main.get_taxis(spark).show(10)"
-{{else}}
-     "spark.range(10)"
-{{end -}}
+{{- /* We can use the short form here without 'dbutils.text()' since the widgets are defined in the metadata below. */}}
+     "catalog = dbutils.widgets.get('catalog')\n",
+     "schema = dbutils.widgets.get('schema')\n",
+     "spark.sql(f'USE {catalog}.{schema}')\n",
+     "\n",
+     "spark.sql('SELECT * FROM example').show(10)"
+{{- else}}
+     "# Load default catalog and schema as widget and set their values as the default catalog / schema\n",
+     "catalog = dbutils.widgets.get('catalog')\n",
+     "schema = dbutils.widgets.get('schema')\n",
+     "spark.sql(f'USE {catalog}.{schema}')\n",
+     "\n",
+     "spark.sql('SELECT * FROM example').show(10)"
+{{- end}}
    ]
   }
  ],
@@ -46,8 +56,63 @@
   "notebookMetadata": {
    "pythonIndentUnit": 2
   },
-  "notebookName": "ipynb-notebook",
-  "widgets": {}
+  "notebookName": "exploration",
+  "widgets": {
+   "catalog": {
+    "currentValue": "{{.default_catalog}}",
+    "nuid": "c47e96d8-5751-4c8a-9d6b-5c6c7c3f1234",
+    "typedWidgetInfo": {
+     "autoCreated": false,
+     "defaultValue": "{{.default_catalog}}",
+     "label": null,
+     "name": "catalog",
+     "options": {
+      "widgetDisplayType": "Text",
+      "validationRegex": null
+     },
+     "parameterDataType": "String"
+    },
+    "widgetInfo": {
+     "widgetType": "text",
+     "defaultValue": "{{.default_catalog}}",
+     "label": null,
+     "name": "catalog",
+     "options": {
+      "widgetType": "text",
+      "autoCreated": null,
+      "validationRegex": null
+     }
+    }
+   },
+   {{- $dev_schema := .shared_schema }}
+   {{- if (regexp "^yes").MatchString .personal_schemas}}
+   {{- $dev_schema = "{{short_name}}"}}
+   {{- end}}
+   "schema": {
+    "currentValue": "{{$dev_schema}}",
+    "nuid": "c47e96d8-5751-4c8a-9d6b-5c6c7c3f5678",
+    "typedWidgetInfo": {
+     "autoCreated": false,
+     "defaultValue": "{{$dev_schema}}",
+     "label": null,
+     "name": "schema",
+     "options": {
+      "widgetDisplayType": "Text",
+      "validationRegex": null
+     },
+     "parameterDataType": "String"
+    },
+    "widgetInfo": {
+     "widgetType": "text",
+     "defaultValue": "{{$dev_schema}}",
+     "label": null,
+     "name": "schema",
+     "options": {
+      "widgetType": "text",
+      "autoCreated": null,
+      "validationRegex": null
+     }
+    }
  },
  "kernelspec": {
   "display_name": "Python 3",

libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl

Lines changed: 65 additions & 5 deletions
@@ -23,8 +23,11 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "%load_ext autoreload\n",
-    "%autoreload 2"
+    "# Load default catalog and schema as widget and set their values as the default catalog / schema\n",
+{{- /* We can use the short form here without 'dbutils.text()' since the widgets are defined in the metadata below. */}}
+    "catalog = dbutils.widgets.get('catalog')\n",
+    "schema = dbutils.widgets.get('schema')\n",
+    "spark.sql(f'USE {catalog}.{schema}')"
   ]
  },
  {
@@ -47,9 +50,9 @@
 {{- if (eq .include_python "yes") }}
    "from {{.project_name}} import main\n",
    "\n",
-    "main.get_taxis(spark).show(10)"
+    "main.create_example_table()"
 {{else}}
-    "spark.range(10)"
+    "spark.sql("CREATE OR REPLACE TABLE example AS SELECT 'example table' AS text_column")"
 {{end -}}
    ]
   }
@@ -62,7 +65,64 @@
   "pythonIndentUnit": 2
  },
  "notebookName": "notebook",
-  "widgets": {}
+  "widgets": {
+   "catalog": {
+    "currentValue": "{{.default_catalog}}",
+    "nuid": "3965fc9c-8080-45b1-bee3-f75cef7685b4",
+    "typedWidgetInfo": {
+     "autoCreated": false,
+     "defaultValue": "{{.default_catalog}}",
+     "label": null,
+     "name": "catalog",
+     "options": {
+      "widgetDisplayType": "Text",
+      "validationRegex": null
+     },
+     "parameterDataType": "String"
+    },
+    "widgetInfo": {
+     "widgetType": "text",
+     "defaultValue": "{{.default_catalog}}",
+     "label": null,
+     "name": "catalog",
+     "options": {
+      "widgetType": "text",
+      "autoCreated": null,
+      "validationRegex": null
+     }
+    }
+   },
+   {{- $dev_schema := .shared_schema }}
+   {{- if (regexp "^yes").MatchString .personal_schemas}}
+   {{- $dev_schema = "{{short_name}}"}}
+   {{- end}}
+   "schema": {
+    "currentValue": "{{$dev_schema}}",
+    "nuid": "6ec0d70f-39bf-4859-a510-02c3e3d59bff",
+    "typedWidgetInfo": {
+     "autoCreated": false,
+     "defaultValue": "{{$dev_schema}}",
+     "label": null,
+     "name": "schema",
+     "options": {
+      "widgetDisplayType": "Text",
+      "validationRegex": null
+     },
+     "parameterDataType": "String"
+    },
+    "widgetInfo": {
+     "widgetType": "text",
+     "defaultValue": "{{$dev_schema}}",
+     "label": null,
+     "name": "schema",
+     "options": {
+      "widgetType": "text",
+      "autoCreated": null,
+      "validationRegex": null
+     }
+    }
+   }
+  }
 },
 "kernelspec": {
  "display_name": "Python 3",
Lines changed: 26 additions & 8 deletions
@@ -1,21 +1,39 @@
 from pyspark.sql import SparkSession, DataFrame
+import argparse
 
-def get_taxis(spark: SparkSession) -> DataFrame:
-    return spark.read.table("samples.nyctaxi.trips")
-
-
-# Create a new Databricks Connect session. If this fails,
-# check that you have configured Databricks Connect correctly.
-# See https://docs.databricks.com/dev-tools/databricks-connect.html.
 def get_spark() -> SparkSession:
+    """
+    Create a new Databricks Connect session. If this fails,
+    check that you have configured Databricks Connect correctly.
+    See https://docs.databricks.com/dev-tools/databricks-connect.html.
+    """
     try:
         from databricks.connect import DatabricksSession
         return DatabricksSession.builder.getOrCreate()
     except ImportError:
         return SparkSession.builder.getOrCreate()
 
+def get_taxis(spark: SparkSession) -> DataFrame:
+    return spark.read.table("samples.nyctaxi.trips")
+
+def create_example_table():
+    """
+    Create a table called 'example' in the default catalog and schema.
+    """
+    get_spark().sql("CREATE OR REPLACE TABLE example AS SELECT 'example table' AS text_column")
+
 def main():
-    get_taxis(get_spark()).show(5)
+    # Set the catalog and schema for the current session.
+    # In the default template, these parameters are set
+    # using the 'catalog' and 'schema' presets in databricks.yml.
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--catalog', required=True)
+    parser.add_argument('--schema', required=True)
+    args, unknown = parser.parse_known_args()
+    spark = get_spark()
+    spark.sql(f"USE {args.catalog}.{args.schema}")
+
+    create_example_table()
 
 if __name__ == '__main__':
     main()
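
One detail worth noting in the new main(): it calls parse_known_args() rather than parse_args(), which presumably tolerates any extra arguments a job runner appends instead of raising an error on them. A minimal, self-contained sketch of that behavior, with placeholder argument values:

# Sketch only: illustrate parse_known_args() with placeholder values.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--catalog', required=True)
parser.add_argument('--schema', required=True)
args, unknown = parser.parse_known_args(['--catalog', 'main', '--schema', 'dev_user', '--extra-flag', 'x'])
print(args.catalog, args.schema)  # -> main dev_user
print(unknown)                    # -> ['--extra-flag', 'x']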

libs/template/templates/default-sql/databricks_template_schema.json

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@
       "default": "default",
       "pattern": "^\\w+$",
       "pattern_match_failure_message": "Invalid schema name.",
-      "description": "\nPlease provide an initial schema during development.\ndefault_schema",
+      "description": "\nPlease provide a default schema during development.\ndefault_schema",
       "order": 5
     }
   },
