Skip to content

Commit 31cb958

Browse files
committed
chore: use uv for running tests
1 parent ad5ada5 commit 31cb958

File tree

7 files changed

+121
-64
lines changed

7 files changed

+121
-64
lines changed

.github/workflows/main.yml

Lines changed: 47 additions & 55 deletions
Original file line number | Diff line number | Diff line change
@@ -12,43 +12,45 @@ on:
1212
- ready_for_review
1313
workflow_dispatch:
1414

15+
env:
16+
UV_VERSION: "0.9.11"
17+
1518
jobs:
1619
lint:
1720
runs-on: ubuntu-latest
1821
timeout-minutes: 10
1922
steps:
2023
- uses: actions/checkout@v4
21-
- name: Set up Python
22-
uses: actions/setup-python@v5
24+
- name: Install uv
25+
uses: astral-sh/setup-uv@v6
2326
with:
24-
python-version: "3.10"
27+
version: ${{ env.UV_VERSION }}
2528
- name: Install dependencies
2629
run: |
27-
pip install . --group lint
30+
uv sync --group lint
2831
- name: Lint Python code with ruff
2932
run: |
30-
ruff check .
31-
ruff format --check .
33+
uv run ruff check .
34+
uv run ruff format --check .
3235
3336
core_test:
3437
runs-on: ubuntu-latest
38+
name: core_test python-${{ matrix.python-version }}
3539
strategy:
3640
matrix:
3741
python-version: ["3.10", "3.11", "3.12"]
3842
timeout-minutes: 20
3943
steps:
4044
- name: Checkout code
4145
uses: actions/checkout@v4
42-
- name: Set up Python
43-
uses: actions/setup-python@v5
46+
- name: Install uv
47+
uses: astral-sh/setup-uv@v6
4448
with:
49+
version: ${{ env.UV_VERSION }}
4550
python-version: ${{ matrix.python-version }}
46-
- name: Install dependencies
47-
run: |
48-
pip install ".[memory]" --group dev
4951
- name: Run tests
5052
run: |
51-
pytest tests/
53+
uv run --exact --group integration --extra memory pytest tests/
5254
5355
langchain_test:
5456
runs-on: ubuntu-latest
@@ -59,17 +61,15 @@ jobs:
5961
steps:
6062
- name: Checkout code
6163
uses: actions/checkout@v4
62-
- name: Set up Python
63-
uses: actions/setup-python@v5
64+
- name: Install uv
65+
uses: astral-sh/setup-uv@v6
6466
with:
67+
version: ${{ env.UV_VERSION }}
6568
python-version: ${{ matrix.python-version }}
66-
- name: Install dependencies
67-
run: |
68-
pip install .
69-
pip install integrations/langchain --group dev
7069
- name: Run tests
70+
working-directory: integrations/langchain
7171
run: |
72-
pytest integrations/langchain/tests/unit_tests
72+
uv run --exact --group integration pytest tests/unit_tests
7373
7474
lakebase_memory_test:
7575
runs-on: ubuntu-latest
@@ -80,18 +80,18 @@ jobs:
8080
steps:
8181
- name: Checkout code
8282
uses: actions/checkout@v4
83-
- name: Set up Python
84-
uses: actions/setup-python@v5
83+
- name: Install uv
84+
uses: astral-sh/setup-uv@v6
8585
with:
86+
version: ${{ env.UV_VERSION }}
8687
python-version: ${{ matrix.python-version }}
87-
- name: Install dependencies
88+
- name: Run databricks-ai-bridge Tests
8889
run: |
89-
pip install .[memory]
90-
pip install "integrations/langchain[memory]" --group dev
91-
- name: Run tests
90+
uv run --exact --group integration --extra memory pytest tests/databricks_ai_bridge/test_lakebase.py
91+
- name: Run databricks-langchain Tests
92+
working-directory: integrations/langchain
9293
run: |
93-
pytest tests/databricks_ai_bridge/test_lakebase.py
94-
pytest integrations/langchain/tests/unit_tests/test_checkpoint.py
94+
uv run --exact --group integration --extra memory pytest integrations/langchain/tests/unit_tests/test_checkpoint.py
9595
9696
langchain_cross_version_test:
9797
runs-on: ubuntu-latest
@@ -149,17 +149,15 @@ jobs:
149149
steps:
150150
- name: Checkout code
151151
uses: actions/checkout@v4
152-
- name: Set up Python
153-
uses: actions/setup-python@v5
152+
- name: Install uv
153+
uses: astral-sh/setup-uv@v6
154154
with:
155+
version: ${{ env.UV_VERSION }}
155156
python-version: ${{ matrix.python-version }}
156-
- name: Install dependencies
157-
run: |
158-
pip install .
159-
pip install integrations/openai --group dev
160157
- name: Run tests
158+
working-directory: integrations/openai
161159
run: |
162-
pytest integrations/openai/tests/unit_tests
160+
uv run --exact --group integration pytest tests/unit_tests
163161
164162
openai_cross_version_test:
165163
runs-on: ubuntu-latest
@@ -214,17 +212,15 @@ jobs:
214212
steps:
215213
- name: Checkout code
216214
uses: actions/checkout@v4
217-
- name: Set up Python
218-
uses: actions/setup-python@v5
215+
- name: Install uv
216+
uses: astral-sh/setup-uv@v6
219217
with:
218+
version: ${{ env.UV_VERSION }}
220219
python-version: ${{ matrix.python-version }}
221-
- name: Install dependencies
222-
run: |
223-
pip install .
224-
pip install integrations/llamaindex --group dev
225220
- name: Run tests
221+
working-directory: integrations/llamaindex
226222
run: |
227-
pytest integrations/llamaindex/tests/unit_tests
223+
uv run --exact --group integration pytest tests/unit_tests
228224
229225
mcp_test:
230226
runs-on: ubuntu-latest
@@ -235,17 +231,15 @@ jobs:
235231
steps:
236232
- name: Checkout code
237233
uses: actions/checkout@v4
238-
- name: Set up Python
239-
uses: actions/setup-python@v5
234+
- name: Install uv
235+
uses: astral-sh/setup-uv@v6
240236
with:
237+
version: ${{ env.UV_VERSION }}
241238
python-version: ${{ matrix.python-version }}
242-
- name: Install dependencies
243-
run: |
244-
pip install .
245-
pip install -e databricks_mcp --group dev
246239
- name: Run tests
240+
working-directory: databricks_mcp
247241
run: |
248-
pytest databricks_mcp/tests/unit_tests
242+
uv run --exact --group integration pytest tests/unit_tests
249243
250244
dspy_test:
251245
runs-on: ubuntu-latest
@@ -256,14 +250,12 @@ jobs:
256250
steps:
257251
- name: Checkout code
258252
uses: actions/checkout@v4
259-
- name: Set up Python
260-
uses: actions/setup-python@v5
253+
- name: Install uv
254+
uses: astral-sh/setup-uv@v6
261255
with:
256+
version: ${{ env.UV_VERSION }}
262257
python-version: ${{ matrix.python-version }}
263-
- name: Install dependencies
264-
run: |
265-
pip install .
266-
pip install integrations/dspy --group dev
267258
- name: Run tests
259+
working-directory: integrations/dspy
268260
run: |
269-
pytest integrations/dspy/tests/unit_tests
261+
uv run --exact --group integration pytest tests/unit_tests

databricks_mcp/pyproject.toml

Lines changed: 13 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -17,21 +17,25 @@ dependencies = [
1717

1818
[dependency-groups]
1919
dev = [
20-
"pytest",
21-
"pytest-asyncio",
2220
"typing_extensions",
2321
"databricks-sdk>=0.49.0",
2422
"ruff==0.6.4",
23+
{ include-group = "integration" }
2524
]
2625

2726
integration = [
27+
"pytest",
28+
"pytest-asyncio",
2829
"pytest-timeout>=2.3.1",
2930
]
3031

3132
[build-system]
3233
requires = ["hatchling"]
3334
build-backend = "hatchling.build"
3435

36+
[tool.uv.sources]
37+
databricks-ai-bridge = { path = "../", editable = true }
38+
3539
[tool.hatch.build]
3640
include = [
3741
"src/databricks_mcp/*"
@@ -71,3 +75,10 @@ docstring-code-line-length = 88
7175

7276
[tool.ruff.lint.pydocstyle]
7377
convention = "google"
78+
79+
[tool.pytest.ini_options]
80+
filterwarnings = [
81+
"ignore::Warning",
82+
"default::Warning:databricks_mcp",
83+
"default::Warning:tests",
84+
]

integrations/dspy/pyproject.toml

Lines changed: 14 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -16,14 +16,20 @@ dependencies = [
1616

1717
[dependency-groups]
1818
dev = [
19-
"pytest",
2019
"ruff",
20+
{ include-group = "integration" }
21+
]
22+
integration = [
23+
"pytest",
2124
]
2225

2326
[build-system]
2427
requires = ["hatchling"]
2528
build-backend = "hatchling.build"
2629

30+
[tool.uv.sources]
31+
databricks-ai-bridge = { path = "../../", editable = true }
32+
2733
[tool.hatch.build]
2834
include = [
2935
"src/databricks_dspy/*"
@@ -60,3 +66,10 @@ select = [
6066
[tool.ruff.format]
6167
docstring-code-format = true
6268
docstring-code-line-length = 100
69+
70+
[tool.pytest.ini_options]
71+
filterwarnings = [
72+
"ignore::Warning",
73+
"default::Warning:databricks_dspy",
74+
"default::Warning:tests",
75+
]

integrations/langchain/pyproject.toml

Lines changed: 12 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -27,12 +27,13 @@ memory = [
2727

2828
[dependency-groups]
2929
dev = [
30-
"pytest",
3130
"typing_extensions",
3231
"ruff==0.6.4",
32+
{ include-group = "integration" }
3333
]
3434

3535
integration = [
36+
"pytest",
3637
"langgraph>=0.2.27",
3738
"pytest-timeout>=2.3.1",
3839
"pytest-asyncio",
@@ -44,6 +45,9 @@ integration = [
4445
requires = ["hatchling"]
4546
build-backend = "hatchling.build"
4647

48+
[tool.uv.sources]
49+
databricks-ai-bridge = { path = "../../", editable = true }
50+
4751
[tool.hatch.build]
4852
include = [
4953
"src/databricks_langchain/*"
@@ -83,3 +87,10 @@ docstring-code-line-length = 88
8387

8488
[tool.ruff.lint.pydocstyle]
8589
convention = "google"
90+
91+
[tool.pytest.ini_options]
92+
filterwarnings = [
93+
"ignore::Warning",
94+
"default::Warning:databricks_langchain",
95+
"default::Warning:tests",
96+
]

integrations/llamaindex/pyproject.toml

Lines changed: 12 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -17,20 +17,24 @@ dependencies = [
1717

1818
[dependency-groups]
1919
dev = [
20-
"pytest",
2120
"typing_extensions",
2221
"databricks-sdk>=0.34.0",
2322
"ruff==0.6.4",
23+
{ include-group = "integration" }
2424
]
2525

2626
integration = [
27+
"pytest",
2728
"pytest-timeout>=2.3.1",
2829
]
2930

3031
[build-system]
3132
requires = ["hatchling"]
3233
build-backend = "hatchling.build"
3334

35+
[tool.uv.sources]
36+
databricks-ai-bridge = { path = "../../", editable = true }
37+
3438
[tool.hatch.build]
3539
include = [
3640
"src/databricks_llamaindex/*"
@@ -70,3 +74,10 @@ docstring-code-line-length = 88
7074

7175
[tool.ruff.lint.pydocstyle]
7276
convention = "google"
77+
78+
[tool.pytest.ini_options]
79+
filterwarnings = [
80+
"ignore::Warning",
81+
"default::Warning:databricks_llamaindex",
82+
"default::Warning:tests",
83+
]

integrations/openai/pyproject.toml

Lines changed: 13 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -22,21 +22,25 @@ dependencies = [
2222

2323
[dependency-groups]
2424
dev = [
25-
"pytest",
26-
"pytest-asyncio",
2725
"typing_extensions",
2826
"databricks-sdk>=0.34.0",
2927
"ruff==0.6.4",
28+
{ include-group = "integration" }
3029
]
3130

3231
integration = [
32+
"pytest",
33+
"pytest-asyncio",
3334
"pytest-timeout>=2.3.1",
3435
]
3536

3637
[build-system]
3738
requires = ["hatchling"]
3839
build-backend = "hatchling.build"
3940

41+
[tool.uv.sources]
42+
databricks-ai-bridge = { path = "../../", editable = true }
43+
4044
[tool.hatch.build]
4145
include = [
4246
"src/databricks_openai/*"
@@ -76,3 +80,10 @@ docstring-code-line-length = 88
7680

7781
[tool.ruff.lint.pydocstyle]
7882
convention = "google"
83+
84+
[tool.pytest.ini_options]
85+
filterwarnings = [
86+
"ignore::Warning",
87+
"default::Warning:databricks_openai",
88+
"default::Warning:tests",
89+
]

0 commit comments

Comments (0)