Commit aca15a6

fix unit tests pointing to a databricks workspace outside of vpn
1 parent 7dcf449 commit aca15a6

File tree

5 files changed, +14 -18 lines


.github/workflows/flights_liquibase.yml

Lines changed: 2 additions & 3 deletions

@@ -103,9 +103,8 @@ jobs:
       - name: Run unit tests
         env:
           DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
-          DATABRICKS_CLIENT_ID: ${{ secrets.DATABRICKS_CLIENT_ID }}
-          DATABRICKS_CLIENT_SECRET: ${{ secrets.DATABRICKS_CLIENT_SECRET }}
-          DBCONNECT_SERVERLESS: "false"
+          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN_TST }}
+          DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
         run: |
           pytest flights/flights-liquibase/tests/
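With the service-principal variables replaced by a workspace token and cluster id, the tests lean on Databricks Connect / Databricks SDK unified authentication to resolve the connection from the environment instead of from hard-coded settings. A minimal sketch of that resolution, assuming the three variables exported by the step above are present (no configuration beyond what the fixture further down already calls):

from databricks.connect import DatabricksSession

# The builder reads DATABRICKS_HOST, DATABRICKS_TOKEN and DATABRICKS_CLUSTER_ID
# from the environment via the SDK's unified auth, so no workspace URL or
# credentials need to live in the repository.
spark = DatabricksSession.builder.getOrCreate()

# Quick round trip to confirm the remote session is usable.
assert spark.range(3).count() == 3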
Lines changed: 2 additions & 2 deletions

@@ -1,4 +1,4 @@
 wheel
 pytest
-databricks-sdk[notebook]==0.35.0
-pandas==2.2.3
+databricks-sdk[notebook]>=0.46.0
+pandas==2.2.3
Lines changed: 1 addition & 4 deletions

@@ -1,5 +1,2 @@
-liquibase.command.url: jdbc:databricks://e2-demo-field-eng.cloud.databricks.com:443;AuthMech=3;httpPath=/sql/1.0/warehouses/4b9b953939869799;
-liquibase.databricks.catalog: main
+liquibase.databricks.catalog: liquibase
 liquibase.databricks.schema: lr_liquibase_test
-liquibase.command.username: token
-liquibase.command.password: ${{ secrets.DATABRICKS_TOKEN_TST }}
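The JDBC URL, username and token are dropped from the properties file; the diff does not show where they are supplied instead, but Liquibase also accepts them as LIQUIBASE_COMMAND_* environment variables, which keeps workspace-specific values out of the repository. A hedged sketch of that pattern, assuming the CI job exports the values and drives Liquibase itself (the secret name and defaults-file path are placeholders, not taken from this commit):

import os
import subprocess

# Assumption: Liquibase maps LIQUIBASE_COMMAND_URL / _USERNAME / _PASSWORD onto
# liquibase.command.url / username / password, so the values removed from the
# properties file can come from CI secrets at run time instead.
env = dict(os.environ)
env["LIQUIBASE_COMMAND_URL"] = os.environ["DATABRICKS_JDBC_URL"]  # placeholder secret name
env["LIQUIBASE_COMMAND_USERNAME"] = "token"
env["LIQUIBASE_COMMAND_PASSWORD"] = os.environ["DATABRICKS_TOKEN"]

# Run the migration with the catalog/schema still coming from the defaults file.
subprocess.run(
    ["liquibase", "--defaults-file=liquibase.properties", "update"],
    env=env,
    check=True,
)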
Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-databricks-connect==15.4.2
+databricks-connect>=17.0.0

flights/flights-liquibase/tests/unit_transforms/test_flight_transforms.py

Lines changed: 8 additions & 8 deletions

@@ -5,17 +5,17 @@

 @pytest.fixture(scope="module")
 def spark_session():
-    #try:
-    # from databricks.connect import DatabricksSession
-    if os.environ.get("DBCONNECT_SERVERLESS", "false").lower() == "true":
-        return DatabricksSession.builder.serverless(True).getOrCreate()
+    try:
+        from databricks.connect import DatabricksSession
+        # if os.environ.get("DBCONNECT_SERVERLESS", "false").lower() == "true":
+        #     return DatabricksSession.builder.serverless(True).getOrCreate()
         # else:
-        #     return DatabricksSession.builder.getOrCreate()
+        return DatabricksSession.builder.getOrCreate()
     # except (ValueError, RuntimeError):
     #     from databricks.connect import DatabricksSession
-    #     return DatabricksSession.builder.profile("unit_tests").getOrCreate()
-    #except (ImportError, ValueError, RuntimeError):
-    else:
+    #     return DatabricksSession.builder.profile("unit_tests").getOrCreate()
+    except (ImportError, ValueError, RuntimeError):
+    # else:
         print("No Databricks Connect, build and return local SparkSession")
         from pyspark.sql import SparkSession
         return SparkSession.builder.getOrCreate()
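After this change the fixture first tries a remote Databricks Connect session and falls back to a plain local SparkSession only when databricks-connect is absent or cannot resolve a connection. A small illustrative test showing how the fixture is consumed (the test name and data are made up for illustration; the real assertions live in the rest of test_flight_transforms.py):

def test_spark_session_fixture_smoke(spark_session):
    # Works against either backend the fixture returns: a Databricks Connect
    # session or a local SparkSession.
    df = spark_session.createDataFrame([(1, "LAX"), (2, "JFK")], ["id", "origin"])
    assert df.count() == 2
    assert set(df.columns) == {"id", "origin"}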
