
Commit b95a04a

benc-db and alexguo-db authored
Migrating to pyproject.toml and Hatch (#853)
Co-authored-by: Alex Guo <[email protected]>
1 parent 97d6381 commit b95a04a


45 files changed: +438, -450 lines

.github/workflows/integration.yml

Lines changed: 36 additions & 24 deletions
@@ -14,7 +14,7 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  run-tox-tests-uc-cluster:
+  run-uc-cluster-e2e-tests:
     runs-on: ubuntu-latest
     environment: azure-prod
     env:
@@ -26,22 +26,26 @@ jobs:
       TEST_PECO_UC_CLUSTER_ID: ${{ secrets.TEST_PECO_UC_CLUSTER_ID }}
     steps:
       - name: Check out repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+
       - name: Set up python
         id: setup-python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.9"
+
       - name: Get http path from environment
         run: python .github/workflows/build_cluster_http_path.py
         shell: sh
-      - name: Install tox
+
+      - name: Install Hatch
         id: install-dependencies
-        run: pip install tox
-      - name: Run integration-uc-databricks-cluster
-        run: [email protected] DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET tox -e integration-databricks-uc-cluster
+        uses: pypa/hatch@install
 
-  run-tox-tests-uc-sql:
+      - name: Run UC Cluster Functional Tests
+        run: [email protected] DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run uc-cluster-e2e
+
+  run-sqlwarehouse-e2e-tests:
     runs-on: ubuntu-latest
     environment: azure-prod
     env:
@@ -54,22 +58,26 @@ jobs:
       TEST_PECO_UC_CLUSTER_ID: ${{ secrets.TEST_PECO_UC_CLUSTER_ID }}
     steps:
       - name: Check out repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+
       - name: Set up python
         id: setup-python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.9"
+
       - name: Get http path from environment
         run: python .github/workflows/build_cluster_http_path.py
         shell: sh
-      - name: Install tox
+
+      - name: Install Hatch
         id: install-dependencies
-        run: pip install tox
-      - name: Run integration-databricks-uc-sql-endpoint
-        run: [email protected] DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET tox -e integration-databricks-uc-sql-endpoint
+        uses: pypa/hatch@install
+
+      - name: Run Sql Endpoint Functional Tests
+        run: [email protected] DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH=$DBT_DATABRICKS_UC_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run sqlw-e2e
 
-  run-tox-tests-non-uc:
+  run-cluster-e2e-tests:
     runs-on: ubuntu-latest
     environment: azure-prod
     env:
@@ -79,17 +87,21 @@ jobs:
       DBT_DATABRICKS_LOCATION_ROOT: ${{ secrets.TEST_PECO_EXTERNAL_LOCATION }}test
     steps:
       - name: Check out repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
+
       - name: Set up python
         id: setup-python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.9"
+
       - name: Get http path from environment
         run: python .github/workflows/build_cluster_http_path.py
         shell: sh
-      - name: Install tox
+
+      - name: Install Hatch
         id: install-dependencies
-        run: pip install tox
-      - name: Run integration-databricks-cluster
-        run: [email protected] DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_HTTP_PATH=$DBT_DATABRICKS_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET tox -e integration-databricks-cluster
+        uses: pypa/hatch@install
+
+      - name: Run Cluster Functional Tests
+        run: [email protected] DBT_DATABRICKS_LOCATION_ROOT=$DBT_DATABRICKS_LOCATION_ROOT DBT_DATABRICKS_HOST_NAME=$DBT_DATABRICKS_HOST_NAME DBT_DATABRICKS_HTTP_PATH=$DBT_DATABRICKS_CLUSTER_HTTP_PATH DBT_DATABRICKS_CLIENT_ID=$DBT_DATABRICKS_CLIENT_ID DBT_DATABRICKS_CLIENT_SECRET=$DBT_DATABRICKS_CLIENT_SECRET hatch -v run cluster-e2e
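
Note: the three jobs above now delegate to named Hatch scripts (uc-cluster-e2e, sqlw-e2e, cluster-e2e) instead of tox environments. The pyproject.toml added by this commit is not shown in this excerpt; the sketch below only illustrates how such scripts are typically declared under [tool.hatch.envs.default.scripts]. The dependencies and pytest arguments are assumptions, not the project's actual configuration.

# Illustrative sketch only: script names mirror the `hatch -v run <name>` calls above;
# dependencies and pytest arguments are assumed.
[tool.hatch.envs.default]
dependencies = [
  "pytest",              # assumed test runner
  "dbt-tests-adapter",   # assumed functional-test harness
]

[tool.hatch.envs.default.scripts]
cluster-e2e = "pytest tests/functional --profile databricks_cluster {args}"            # assumed
uc-cluster-e2e = "pytest tests/functional --profile databricks_uc_cluster {args}"      # assumed
sqlw-e2e = "pytest tests/functional --profile databricks_uc_sql_endpoint {args}"       # assumed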

.github/workflows/main.yml

Lines changed: 23 additions & 127 deletions
@@ -42,40 +42,28 @@ defaults:
 
 jobs:
   code-quality:
-    name: ${{ matrix.toxenv }}
+    name: Code Quality
 
     runs-on: ubuntu-latest
     timeout-minutes: 10
 
     strategy:
       fail-fast: false
-      matrix:
-        toxenv: [linter]
-
-    env:
-      TOXENV: ${{ matrix.toxenv }}
-      PYTEST_ADDOPTS: "-v --color=yes"
 
     steps:
       - name: Check out the repository
-        uses: actions/checkout@v3
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
 
       - name: Set up Python
-        uses: actions/setup-python@v4.3.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.9"
 
-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install tox
-          tox --version
+      - name: Install Hatch
+        uses: pypa/hatch@install
 
-      - name: Run tox
-        run: tox
+      - name: Run Code Quality
+        run: hatch -v run code-quality
 
   unit:
     name: unit test / python ${{ matrix.python-version }}
@@ -88,131 +76,39 @@ jobs:
       matrix:
         python-version: ["3.9", "3.10", "3.11", "3.12"]
 
-    env:
-      TOXENV: "unit"
-      PYTEST_ADDOPTS: "-v --color=yes --csv unit_results.csv"
-
     steps:
       - name: Check out the repository
-        uses: actions/checkout@v3
-        with:
-          persist-credentials: false
+        uses: actions/checkout@v4
 
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4.3.0
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
 
-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip --version
-          python -m pip install tox
-          tox --version
-      - name: Run tox
-        run: tox
-
-      - name: Get current date
-        if: always()
-        id: date
-        run: echo "::set-output name=date::$(date +'%Y-%m-%dT%H_%M_%S')" #no colons allowed for artifacts
-
-      - uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: unit_results_${{ matrix.python-version }}-${{ steps.date.outputs.date }}.csv
-          path: unit_results.csv
+      - name: Install Hatch
+        uses: pypa/hatch@install
 
-  build:
-    name: build packages
+      - name: Run Unit Tests
+        run: hatch run -v +py=${{ matrix.python-version }} test:unit
 
+  build:
+    name: Build and Verify Packages
     runs-on: ubuntu-latest
 
-    outputs:
-      is_alpha: ${{ steps.check-is-alpha.outputs.is_alpha }}
-
     steps:
       - name: Check out the repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Set up Python
-        uses: actions/setup-python@v4.3.0
+        uses: actions/setup-python@v5
         with:
           python-version: "3.9"
 
-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip install --upgrade setuptools wheel twine check-wheel-contents
-          python -m pip --version
-      - name: Build distributions
-        run: ./scripts/build-dist.sh
-
-      - name: Show distributions
-        run: ls -lh dist/
-
-      - name: Check distribution descriptions
-        run: |
-          twine check dist/*
-      - name: Check wheel contents
-        run: |
-          check-wheel-contents dist/*.whl --ignore W007,W008
-
-      - name: Check if this is an alpha version
-        id: check-is-alpha
-        run: |
-          export is_alpha=0
-          if [[ "$(ls -lh dist/)" == *"a1"* ]]; then export is_alpha=1; fi
-          echo "::set-output name=is_alpha::$is_alpha"
-
-      - uses: actions/upload-artifact@v3
-        with:
-          name: dist
-          path: dist/
-
-  test-build:
-    name: verify packages / python ${{ matrix.python-version }} / ${{ matrix.os }}
-
-    if: needs.build.outputs.is_alpha == 0
+      - name: Install Hatch
+        uses: pypa/hatch@install
 
-    needs: build
-
-    runs-on: ${{ matrix.os }}
-
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [ubuntu-latest, macos-latest, windows-latest]
-        python-version: ["3.9", "3.10", "3.11", "3.12"]
-
-    steps:
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4.3.0
-        with:
-          python-version: ${{ matrix.python-version }}
+      - name: Build distributions
+        run: hatch -v build
 
-      - name: Install python dependencies
-        run: |
-          python -m pip install --user --upgrade pip
-          python -m pip install --upgrade wheel
-          python -m pip --version
-      - uses: actions/download-artifact@v3
-        with:
-          name: dist
-          path: dist/
-
-      - name: Show distributions
-        run: ls -lh dist/
-
-      - name: Install wheel distributions
-        run: |
-          find ./dist/*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
-      - name: Check wheel distributions
-        run: |
-          dbt --version
-      - name: Install source distributions
-        run: |
-          find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/
-      - name: Check source distributions
-        run: |
-          dbt --version
+      - name: Verify distributions
+        run: hatch run verify:check-all
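
Note: in the unit job, `hatch run -v +py=${{ matrix.python-version }} test:unit` selects one member of a matrix-defined `test` environment (the `+py=` selector picks the Python version) and runs its `unit` script, while the build job's `hatch run verify:check-all` invokes a script in a separate `verify` environment. The project's actual pyproject.toml is not part of this excerpt; a minimal sketch of environments that would satisfy these commands, with assumed dependencies and script bodies, looks like this:

# Minimal sketch, not the project's real configuration.
[tool.hatch.envs.test]
dependencies = ["pytest"]          # assumed

[[tool.hatch.envs.test.matrix]]
python = ["3.9", "3.10", "3.11", "3.12"]   # mirrors the workflow matrix

[tool.hatch.envs.test.scripts]
unit = "pytest tests/unit {args}"  # assumed test path

[tool.hatch.envs.verify]
detached = true                    # checks the built artifacts, not the installed package
dependencies = ["twine", "check-wheel-contents"]   # assumed, mirroring the old build checks

[tool.hatch.envs.verify.scripts]
check-all = [
  "twine check dist/*",
  "check-wheel-contents dist/*.whl",
]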

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -19,3 +19,4 @@ logs/
 .venv*
 *.sublime*
 .python-version
+.hatch

.pre-commit-config.yaml

Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+repos:
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.8.0
+    hooks:
+      # Run the linter.
+      - id: ruff
+      # Run the formatter.
+      - id: ruff-format
+
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v1.13.0
+    hooks:
+      - id: mypy
+        additional_dependencies: [types-requests]

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -34,6 +34,7 @@
 - Fix behavior flag use in init of DatabricksAdapter (thanks @VersusFacit!) ([836](https://github.com/databricks/dbt-databricks/pull/836))
 - Restrict pydantic to V1 per dbt Labs' request ([843](https://github.com/databricks/dbt-databricks/pull/843))
 - Switching to Ruff for formatting and linting ([847](https://github.com/databricks/dbt-databricks/pull/847))
+- Switching to Hatch and pyproject.toml for project config ([853](https://github.com/databricks/dbt-databricks/pull/853))
 
 ## dbt-databricks 1.8.7 (October 10, 2024)

MANIFEST.in

Lines changed: 0 additions & 1 deletion
This file was deleted.
(version module; file name not shown in this view)

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-version: str = "1.9.0b1"
+version = "1.9.0b1"
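
Note: dropping the `: str` annotation leaves a plain literal assignment that a regex-based version source can read, which is presumably why it changed alongside this migration. The wiring below is a hypothetical sketch; the version module's path is not shown in this diff, and the pattern is only an example that matches the simplified assignment:

[project]
name = "dbt-databricks"
dynamic = ["version"]   # version is read from the module below instead of being hard-coded

[tool.hatch.version]
path = "dbt/adapters/databricks/__version__.py"   # hypothetical path
pattern = 'version = "(?P<version>[^"]+)"'        # must expose a `version` named group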

dbt/adapters/databricks/impl.py

Lines changed: 2 additions & 2 deletions
@@ -109,7 +109,7 @@
 @dataclass
 class DatabricksConfig(AdapterConfig):
     file_format: str = "delta"
-    table_format: TableFormat = TableFormat.DEFAULT
+    table_format: str = TableFormat.DEFAULT
     location_root: Optional[str] = None
     include_full_name_in_path: bool = False
     partition_by: Optional[Union[list[str], str]] = None
@@ -764,7 +764,7 @@ class RelationAPIBase(ABC, Generic[DatabricksRelationConfig]):
     For the most part, these are just namespaces to group related methods together.
     """
 
-    relation_type: ClassVar[DatabricksRelationType]
+    relation_type: ClassVar[str]
 
     @classmethod
     @abstractmethod

dbt/adapters/databricks/logging.py

Lines changed: 4 additions & 4 deletions
@@ -1,18 +1,18 @@
-import logging
 import os
+from logging import Handler, LogRecord, getLogger
 from typing import Union
 
 from dbt.adapters.events.logging import AdapterLogger
 
 logger = AdapterLogger("Databricks")
 
 
-class DbtCoreHandler(logging.Handler):
+class DbtCoreHandler(Handler):
     def __init__(self, level: Union[str, int], dbt_logger: AdapterLogger):
         super().__init__(level=level)
         self.logger = dbt_logger
 
-    def emit(self, record: logging.LogRecord) -> None:
+    def emit(self, record: LogRecord) -> None:
         # record.levelname will be debug, info, warning, error, or critical
         # these map 1-to-1 with methods of the AdapterLogger
         log_func = getattr(self.logger, record.levelname.lower())
@@ -21,7 +21,7 @@ def emit(self, record: logging.LogRecord) -> None:
 
 dbt_adapter_logger = AdapterLogger("databricks-sql-connector")
 
-pysql_logger = logging.getLogger("databricks.sql")
+pysql_logger = getLogger("databricks.sql")
 pysql_logger_level = os.environ.get("DBT_DATABRICKS_CONNECTOR_LOG_LEVEL", "WARN").upper()
 pysql_logger.setLevel(pysql_logger_level)
