Skip to content

Commit 9d51b93

Browse files
committed
Chore: update databricks and snowflake auth in integration tests
1 parent c668eef commit 9d51b93

File tree

8 files changed

+64
-59
lines changed

8 files changed

+64
-59
lines changed

.circleci/continue_config.yml

Lines changed: 26 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -258,6 +258,10 @@ jobs:
258258
echo "export REDSHIFT_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
259259
echo "export GCP_POSTGRES_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
260260
echo "export FABRIC_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
261+
262+
# Make snowflake private key available
263+
echo $SNOWFLAKE_PRIVATE_KEY_RAW | base64 -d > /tmp/snowflake-keyfile.p8
264+
echo "export SNOWFLAKE_PRIVATE_KEY_FILE='/tmp/snowflake-keyfile.p8'" >> "$BASH_ENV"
261265
- run:
262266
name: Create test database
263267
command: ./.circleci/manage-test-db.sh << parameters.engine >> "$TEST_DB_NAME" up
@@ -282,24 +286,24 @@ workflows:
282286
parameters:
283287
python_version:
284288
- "3.9"
285-
- "3.10"
286-
- "3.11"
287-
- "3.12"
288-
- "3.13"
289+
#- "3.10"
290+
#- "3.11"
291+
#- "3.12"
292+
#- "3.13"
289293
- cicd_tests_windows
290294
- engine_tests_docker:
291295
name: engine_<< matrix.engine >>
292296
matrix:
293297
parameters:
294298
engine:
295299
- duckdb
296-
- postgres
297-
- mysql
298-
- mssql
299-
- trino
300-
- spark
301-
- clickhouse
302-
- risingwave
300+
#- postgres
301+
#- mysql
302+
#- mssql
303+
#- trino
304+
#- spark
305+
#- clickhouse
306+
#- risingwave
303307
- engine_tests_cloud:
304308
name: cloud_engine_<< matrix.engine >>
305309
context:
@@ -309,18 +313,18 @@ workflows:
309313
matrix:
310314
parameters:
311315
engine:
312-
#- snowflake
316+
- snowflake
313317
- databricks
314-
- redshift
315-
- bigquery
316-
- clickhouse-cloud
317-
- athena
318-
- fabric
319-
- gcp-postgres
320-
filters:
321-
branches:
322-
only:
323-
- main
318+
#- redshift
319+
#- bigquery
320+
#- clickhouse-cloud
321+
#- athena
322+
#- fabric
323+
#- gcp-postgres
324+
#filters:
325+
# branches:
326+
# only:
327+
# - main
324328
- ui_style
325329
- ui_test
326330
- vscode_test

.circleci/manage-test-db.sh

Lines changed: 1 addition & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ function_exists() {
2525
# Snowflake
2626
snowflake_init() {
2727
echo "Installing Snowflake CLI"
28-
pip install "snowflake-cli-labs<3.8.0"
28+
pip install "snowflake-cli"
2929
}
3030

3131
snowflake_up() {
@@ -40,20 +40,6 @@ snowflake_down() {
4040
databricks_init() {
4141
echo "Installing Databricks CLI"
4242
curl -fsSL https://raw.githubusercontent.com/databricks/setup-cli/main/install.sh | sudo sh || true
43-
44-
echo "Writing out Databricks CLI config file"
45-
echo -e "[DEFAULT]\nhost = $DATABRICKS_SERVER_HOSTNAME\ntoken = $DATABRICKS_ACCESS_TOKEN" > ~/.databrickscfg
46-
47-
# this takes a path like 'sql/protocolv1/o/2934659247569/0723-005339-foobar' and extracts '0723-005339-foobar' from it
48-
CLUSTER_ID=${DATABRICKS_HTTP_PATH##*/}
49-
50-
echo "Extracted cluster id: $CLUSTER_ID from '$DATABRICKS_HTTP_PATH'"
51-
52-
# Note: the cluster doesnt need to be running to create / drop catalogs, but it does need to be running to run the integration tests
53-
echo "Ensuring cluster is running"
54-
# the || true is to prevent the following error from causing an abort:
55-
# > Error: is in unexpected state Running.
56-
databricks clusters start $CLUSTER_ID || true
5743
}
5844

5945
databricks_up() {

Makefile

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -212,14 +212,14 @@ risingwave-test: engine-risingwave-up
212212
# Cloud Engines #
213213
#################
214214

215-
snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER guard-SNOWFLAKE_PASSWORD engine-snowflake-install
215+
snowflake-test: guard-SNOWFLAKE_ACCOUNT guard-SNOWFLAKE_WAREHOUSE guard-SNOWFLAKE_DATABASE guard-SNOWFLAKE_USER engine-snowflake-install
216216
pytest -n auto -m "snowflake" --reruns 3 --junitxml=test-results/junit-snowflake.xml
217217

218218
bigquery-test: guard-BIGQUERY_KEYFILE engine-bigquery-install
219219
$(PIP) install -e ".[bigframes]"
220220
pytest -n auto -m "bigquery" --reruns 3 --junitxml=test-results/junit-bigquery.xml
221221

222-
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_ACCESS_TOKEN guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
222+
databricks-test: guard-DATABRICKS_CATALOG guard-DATABRICKS_SERVER_HOSTNAME guard-DATABRICKS_HTTP_PATH guard-DATABRICKS_CONNECT_VERSION engine-databricks-install
223223
$(PIP) install 'databricks-connect==${DATABRICKS_CONNECT_VERSION}'
224224
pytest -n auto -m "databricks" --reruns 3 --junitxml=test-results/junit-databricks.xml
225225

sqlmesh/core/engine_adapter/databricks.py

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -78,21 +78,21 @@ def can_access_databricks_connect(cls, disable_databricks_connect: bool) -> bool
7878
def _use_spark_session(self) -> bool:
7979
if self.can_access_spark_session(bool(self._extra_config.get("disable_spark_session"))):
8080
return True
81-
return (
82-
self.can_access_databricks_connect(
83-
bool(self._extra_config.get("disable_databricks_connect"))
84-
)
85-
and (
86-
{
87-
"databricks_connect_server_hostname",
88-
"databricks_connect_access_token",
89-
}.issubset(self._extra_config)
90-
)
91-
and (
92-
"databricks_connect_cluster_id" in self._extra_config
93-
or "databricks_connect_use_serverless" in self._extra_config
94-
)
95-
)
81+
82+
if self.can_access_databricks_connect(
83+
bool(self._extra_config.get("disable_databricks_connect"))
84+
):
85+
if self._extra_config.get("databricks_connect_use_serverless"):
86+
return True
87+
88+
if {
89+
"databricks_connect_cluster_id",
90+
"databricks_connect_server_hostname",
91+
"databricks_connect_access_token",
92+
}.issubset(self._extra_config):
93+
return True
94+
95+
return False
9696

9797
@property
9898
def is_spark_session_connection(self) -> bool:
@@ -108,7 +108,7 @@ def _set_spark_engine_adapter_if_needed(self) -> None:
108108

109109
connect_kwargs = dict(
110110
host=self._extra_config["databricks_connect_server_hostname"],
111-
token=self._extra_config["databricks_connect_access_token"],
111+
token=self._extra_config.get("databricks_connect_access_token"),
112112
)
113113
if "databricks_connect_use_serverless" in self._extra_config:
114114
connect_kwargs["serverless"] = True

tests/core/engine_adapter/integration/__init__.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -756,7 +756,10 @@ def _get_create_user_or_role(
756756
return username, f"CREATE ROLE {username}"
757757
if self.dialect == "databricks":
758758
# Creating an account-level group in Databricks requires making REST API calls so we are going to
759-
# use a pre-created group instead. We assume the suffix on the name is the unique id
759+
# use a pre-created group instead. We assume the suffix on the name is the unique id.
760+
# In the Databricks UI, Workspace Settings -> Identity and Access, create the following groups:
761+
# - test_user, test_reader, test_writer and test_admin
762+
# (there do not need to be any users assigned to these groups)
760763
return "_".join(username.split("_")[:-1]), None
761764
if self.dialect == "bigquery":
762765
# BigQuery uses IAM service accounts that need to be pre-created

tests/core/engine_adapter/integration/config.yaml

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -128,7 +128,7 @@ gateways:
128128
warehouse: {{ env_var('SNOWFLAKE_WAREHOUSE') }}
129129
database: {{ env_var('SNOWFLAKE_DATABASE') }}
130130
user: {{ env_var('SNOWFLAKE_USER') }}
131-
password: {{ env_var('SNOWFLAKE_PASSWORD') }}
131+
private_key_path: {{ env_var('SNOWFLAKE_PRIVATE_KEY_FILE', 'tests/fixtures/snowflake/rsa_key_no_pass.p8') }}
132132
check_import: false
133133
state_connection:
134134
type: duckdb
@@ -139,7 +139,10 @@ gateways:
139139
catalog: {{ env_var('DATABRICKS_CATALOG') }}
140140
server_hostname: {{ env_var('DATABRICKS_SERVER_HOSTNAME') }}
141141
http_path: {{ env_var('DATABRICKS_HTTP_PATH') }}
142-
access_token: {{ env_var('DATABRICKS_ACCESS_TOKEN') }}
142+
auth_type: {{ env_var('DATABRICKS_AUTH_TYPE', 'databricks-oauth') }}
143+
oauth_client_id: {{ env_var('DATABRICKS_CLIENT_ID') }}
144+
oauth_client_secret: {{ env_var('DATABRICKS_CLIENT_SECRET') }}
145+
databricks_connect_use_serverless: true
143146
check_import: false
144147

145148
inttest_redshift:

tests/core/engine_adapter/integration/conftest.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77
import logging
88
from pytest import FixtureRequest
99

10-
1110
from sqlmesh import Config, EngineAdapter
1211
from sqlmesh.core.constants import SQLMESH_PATH
1312
from sqlmesh.core.config.connection import (

tests/core/engine_adapter/integration/test_freshness.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,16 @@
2525
EVALUATION_SPY = None
2626

2727

28+
@pytest.fixture(autouse=True)
29+
def _skip_snowflake(ctx: TestContext):
30+
if ctx.dialect == "snowflake":
31+
# these tests use callbacks that need to run db queries within a time_travel context that changes the system time to be in the future
32+
# this causes invalid JWT's to be generated when the callbacks try to run a db query
33+
pytest.skip(
34+
"snowflake.connector generates an invalid JWT when time_travel changes the system time"
35+
)
36+
37+
2838
# Mock the snapshot evaluator's evaluate function to count the number of times it is called
2939
@pytest.fixture(autouse=True, scope="function")
3040
def _install_evaluation_spy(mocker: MockerFixture):

0 commit comments

Comments
 (0)