diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 03652c84..1c663a8e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -3,7 +3,7 @@ name: tests on: push: branches: - - master + - main pull_request: types: - opened @@ -12,26 +12,30 @@ on: - ready_for_review workflow_dispatch: +env: + UV_VERSION: "0.9.11" + jobs: lint: runs-on: ubuntu-latest timeout-minutes: 10 steps: - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: - python-version: "3.10" + version: ${{ env.UV_VERSION }} - name: Install dependencies run: | - pip install .[lint] + uv sync --group lint - name: Lint Python code with ruff run: | - ruff check . - ruff format --check . + uv run ruff check . + uv run ruff format --check . core_test: runs-on: ubuntu-latest + name: core_test python-${{ matrix.python-version }} strategy: matrix: python-version: ["3.10", "3.11", "3.12"] @@ -39,16 +43,14 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + version: ${{ env.UV_VERSION }} python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install ".[dev, memory]" - name: Run tests run: | - pytest tests/ + uv run --exact --group integration --extra memory pytest tests/ langchain_test: runs-on: ubuntu-latest @@ -59,17 +61,15 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + version: ${{ env.UV_VERSION }} python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install . 
- pip install integrations/langchain[dev] - name: Run tests + working-directory: integrations/langchain run: | - pytest integrations/langchain/tests/unit_tests + uv run --exact --group integration pytest tests/unit_tests lakebase_memory_test: runs-on: ubuntu-latest @@ -80,18 +80,18 @@ steps: - name: Checkout code uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + version: ${{ env.UV_VERSION }} python-version: ${{ matrix.python-version }} - - name: Install dependencies + - name: Run databricks-ai-bridge Tests run: | - pip install .[memory] - pip install "integrations/langchain[dev, memory]" - - name: Run tests + uv run --exact --group integration --extra memory pytest tests/databricks_ai_bridge/test_lakebase.py + - name: Run databricks-langchain Tests + working-directory: integrations/langchain run: | - pytest tests/databricks_ai_bridge/test_lakebase.py - pytest integrations/langchain/tests/unit_tests/test_checkpoint.py + uv run --exact --group integration --extra memory pytest tests/unit_tests/test_checkpoint.py langchain_cross_version_test: runs-on: ubuntu-latest @@ -131,7 +131,7 @@ jobs: cp -r older-version/integrations/langchain integrations/ - name: Install langchain dependency run: | - pip install integrations/langchain[dev] + pip install integrations/langchain --group integrations/langchain/pyproject.toml:dev - name: Run tests run: | # Only testing initialization since functionality can change @@ -149,17 +149,15 @@ steps: - name: Checkout code uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + version: ${{ env.UV_VERSION }} python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install . 
- pip install integrations/openai[dev] - name: Run tests + working-directory: integrations/openai run: | - pytest integrations/openai/tests/unit_tests + uv run --exact --group integration pytest tests/unit_tests openai_cross_version_test: runs-on: ubuntu-latest @@ -198,7 +196,8 @@ jobs: cp -r older-version/integrations/openai integrations/ - name: Install openai dependency run: | - pip install integrations/openai[dev] + pip install . + pip install integrations/openai --group integrations/openai/pyproject.toml:dev - name: Run tests run: | # Only testing initialization since functionality can change @@ -213,17 +212,15 @@ steps: - name: Checkout code uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + version: ${{ env.UV_VERSION }} python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install . - pip install integrations/llamaindex[dev] - name: Run tests + working-directory: integrations/llamaindex run: | - pytest integrations/llamaindex/tests/unit_tests + uv run --exact --group integration pytest tests/unit_tests mcp_test: runs-on: ubuntu-latest @@ -234,17 +231,15 @@ steps: - name: Checkout code uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + version: ${{ env.UV_VERSION }} python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install . 
- pip install -e databricks_mcp[dev] - name: Run tests + working-directory: databricks_mcp run: | - pytest databricks_mcp/tests/unit_tests + uv run --exact --group integration pytest tests/unit_tests dspy_test: runs-on: ubuntu-latest @@ -255,14 +250,12 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 + - name: Install uv + uses: astral-sh/setup-uv@v6 with: + version: ${{ env.UV_VERSION }} python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install . - pip install integrations/dspy[dev] - name: Run tests + working-directory: integrations/dspy run: | - pytest integrations/dspy/tests/unit_tests + uv run --exact --group integration pytest tests/unit_tests diff --git a/databricks_mcp/pyproject.toml b/databricks_mcp/pyproject.toml index 487696a5..3abf8620 100644 --- a/databricks_mcp/pyproject.toml +++ b/databricks_mcp/pyproject.toml @@ -15,16 +15,17 @@ dependencies = [ "mlflow>=3.1" ] -[project.optional-dependencies] +[dependency-groups] dev = [ - "pytest", - "pytest-asyncio", "typing_extensions", "databricks-sdk>=0.49.0", "ruff==0.6.4", + { include-group = "integration" } ] integration = [ + "pytest", + "pytest-asyncio", "pytest-timeout>=2.3.1", ] @@ -32,6 +33,9 @@ integration = [ requires = ["hatchling"] build-backend = "hatchling.build" +[tool.uv.sources] +databricks-ai-bridge = { path = "../", editable = true } + [tool.hatch.build] include = [ "src/databricks_mcp/*" @@ -71,3 +75,10 @@ docstring-code-line-length = 88 [tool.ruff.lint.pydocstyle] convention = "google" + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::Warning", + "default::Warning:databricks_mcp", + "default::Warning:tests", +] diff --git a/integrations/dspy/pyproject.toml b/integrations/dspy/pyproject.toml index 922b63d7..6f159c6a 100644 --- a/integrations/dspy/pyproject.toml +++ b/integrations/dspy/pyproject.toml @@ -14,16 +14,22 @@ dependencies = [ "mlflow>=3.0.0", ] 
-[project.optional-dependencies] +[dependency-groups] dev = [ - "pytest", "ruff", + { include-group = "integration" } +] +integration = [ + "pytest", ] [build-system] requires = ["hatchling"] build-backend = "hatchling.build" +[tool.uv.sources] +databricks-ai-bridge = { path = "../../", editable = true } + [tool.hatch.build] include = [ "src/databricks_dspy/*" @@ -60,3 +66,10 @@ select = [ [tool.ruff.format] docstring-code-format = true docstring-code-line-length = 100 + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::Warning", + "default::Warning:databricks_dspy", + "default::Warning:tests", +] diff --git a/integrations/langchain/pyproject.toml b/integrations/langchain/pyproject.toml index 83ad9000..23fe4145 100644 --- a/integrations/langchain/pyproject.toml +++ b/integrations/langchain/pyproject.toml @@ -20,28 +20,34 @@ dependencies = [ ] [project.optional-dependencies] +memory = [ + "langgraph-checkpoint-postgres>=2.0.0", + "databricks-ai-bridge[memory]>=0.10.0", +] + +[dependency-groups] dev = [ - "pytest", "typing_extensions", "ruff==0.6.4", + { include-group = "integration" } ] integration = [ + "pytest", "langgraph>=0.2.27", "pytest-timeout>=2.3.1", "pytest-asyncio", "anyio", ] -memory = [ - "langgraph-checkpoint-postgres>=2.0.0", - "databricks-ai-bridge[memory]>=0.10.0", -] [build-system] requires = ["hatchling"] build-backend = "hatchling.build" +[tool.uv.sources] +databricks-ai-bridge = { path = "../../", editable = true } + [tool.hatch.build] include = [ "src/databricks_langchain/*" @@ -81,3 +87,10 @@ docstring-code-line-length = 88 [tool.ruff.lint.pydocstyle] convention = "google" + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::Warning", + "default::Warning:databricks_langchain", + "default::Warning:tests", +] diff --git a/integrations/llamaindex/pyproject.toml b/integrations/llamaindex/pyproject.toml index 84460ea9..7e812056 100644 --- a/integrations/llamaindex/pyproject.toml +++ b/integrations/llamaindex/pyproject.toml @@ -15,15 
+15,16 @@ dependencies = [ "unitycatalog-llamaindex[databricks]>=0.2.0", ] -[project.optional-dependencies] +[dependency-groups] dev = [ - "pytest", "typing_extensions", "databricks-sdk>=0.34.0", "ruff==0.6.4", + { include-group = "integration" } ] integration = [ + "pytest", "pytest-timeout>=2.3.1", ] @@ -31,6 +32,9 @@ integration = [ requires = ["hatchling"] build-backend = "hatchling.build" +[tool.uv.sources] +databricks-ai-bridge = { path = "../../", editable = true } + [tool.hatch.build] include = [ "src/databricks_llamaindex/*" @@ -69,4 +73,11 @@ docstring-code-format = true docstring-code-line-length = 88 [tool.ruff.lint.pydocstyle] -convention = "google" \ No newline at end of file +convention = "google" + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::Warning", + "default::Warning:databricks_llamaindex", + "default::Warning:tests", +] diff --git a/integrations/openai/pyproject.toml b/integrations/openai/pyproject.toml index c33cd559..1857e995 100644 --- a/integrations/openai/pyproject.toml +++ b/integrations/openai/pyproject.toml @@ -20,16 +20,17 @@ dependencies = [ "openai-agents>=0.5.0" ] -[project.optional-dependencies] +[dependency-groups] dev = [ - "pytest", - "pytest-asyncio", "typing_extensions", "databricks-sdk>=0.34.0", "ruff==0.6.4", + { include-group = "integration" } ] integration = [ + "pytest", + "pytest-asyncio", "pytest-timeout>=2.3.1", ] @@ -37,6 +38,9 @@ integration = [ requires = ["hatchling"] build-backend = "hatchling.build" +[tool.uv.sources] +databricks-ai-bridge = { path = "../../", editable = true } + [tool.hatch.build] include = [ "src/databricks_openai/*" @@ -76,3 +80,10 @@ docstring-code-line-length = 88 [tool.ruff.lint.pydocstyle] convention = "google" + +[tool.pytest.ini_options] +filterwarnings = [ + "ignore::Warning", + "default::Warning:databricks_openai", + "default::Warning:tests", +] diff --git a/pyproject.toml b/pyproject.toml index ba3e1fae..dee94ca2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,24 
+21,20 @@ dependencies = [ [project.license] file = "LICENSE.txt" -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build] -include = [ - "src/*" +[project.optional-dependencies] +memory = [ + "psycopg[binary,pool]>=3.1", ] -[tool.hatch.build.targets.wheel] -packages = ["src/databricks_ai_bridge"] - -[project.optional-dependencies] +[dependency-groups] dev = [ "hatch", - "pytest", + { include-group = "integration" }, + { include-group = "lint" }, +] +integration = [ "mlflow", - "ruff==0.12.10", + "pytest", "pytest-asyncio", ] doc = [ @@ -56,10 +52,18 @@ lint = [ "ruff==0.12.10", ] -memory = [ - "psycopg[binary,pool]>=3.1", +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build] +include = [ + "src/*" ] +[tool.hatch.build.targets.wheel] +packages = ["src/databricks_ai_bridge"] + [tool.ruff] line-length = 100 target-version = "py39" @@ -94,3 +98,8 @@ convention = "google" [tool.pytest.ini_options] pythonpath = ["src"] +filterwarnings = [ + "ignore::Warning", + "default::Warning:databricks_ai_bridge", + "default::Warning:tests", +]