Skip to content

Commit 6f94853

Browse files
🍱 restructure project to make it databricks.labs compliant
🍱 restructure project to make it `databricks.labs` compliant
2 parents 756a971 + 13f8ebb commit 6f94853

32 files changed

+1050
-882
lines changed

.github/workflows/unit_tests.yml

Lines changed: 25 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -1,66 +1,43 @@
11
name: Unit Tests
22

33
on:
4-
push:
5-
paths:
6-
- "servers/**"
7-
- ".github/workflows/unit_tests.yml"
84
pull_request:
9-
paths:
10-
- "servers/**"
11-
- ".github/workflows/unit_tests.yml"
5+
types: [opened, synchronize]
6+
merge_group:
7+
types: [checks_requested]
8+
push:
9+
# Always run on push to main. The build cache can only be reused
10+
# if it was saved by a run from the repository's default branch.
11+
# The run result will be identical to that from the merge queue
12+
# because the commit is identical, yet we need to perform it to
13+
# seed the build cache.
14+
branches:
15+
- master
1216

1317
jobs:
14-
discover:
15-
runs-on:
16-
group: databrickslabs-protected-runner-group
17-
labels: linux-ubuntu-latest
18-
outputs:
19-
server_dirs: ${{ steps.set-matrix.outputs.server_dirs }}
20-
21-
steps:
22-
- name: Checkout code
23-
uses: actions/checkout@v4
24-
25-
- name: Discover server dirs with tests
26-
id: set-matrix
27-
run: |
28-
dirs=$(find servers -mindepth 1 -maxdepth 1 -type d -exec test -d "{}/tests" \; -print | sed 's|servers/||' | jq -R . | jq -cs .)
29-
echo "Found directories: $dirs"
30-
echo "server_dirs=$dirs" >> "$GITHUB_OUTPUT"
31-
32-
3318

3419
test:
35-
needs: discover
3620
runs-on:
3721
group: databrickslabs-protected-runner-group
3822
labels: linux-ubuntu-latest
39-
strategy:
40-
matrix:
41-
server-dir: ${{ fromJson(needs.discover.outputs.server_dirs) }}
4223

4324
steps:
4425
- name: Checkout code
4526
uses: actions/checkout@v4
4627

47-
- name: Set up Python
48-
uses: actions/setup-python@v5
28+
- name: Install uv and set the python version
29+
uses: astral-sh/setup-uv@v5
4930
with:
5031
python-version: "3.12"
32+
enable-cache: true
33+
cache-dependency-glob: "uv.lock"
5134

52-
- name: Install uv
53-
uses: astral-sh/setup-uv@v5
54-
55-
- name: Install dependencies with uv
56-
working-directory: servers/${{ matrix.server-dir }}
57-
run: |
58-
uv sync
59-
uv pip install --group dev
35+
- name: Install the project
36+
run: uv sync --locked --all-extras --dev
6037

6138
- name: Run tests
62-
working-directory: servers/${{ matrix.server-dir }}
6339
run: uv run pytest tests
40+
6441
lint:
6542
runs-on:
6643
group: databrickslabs-protected-runner-group
@@ -69,15 +46,15 @@ jobs:
6946
- name: Checkout code
7047
uses: actions/checkout@v4
7148

72-
- name: Set up Python
73-
uses: actions/setup-python@v5
49+
- name: Install uv and set the python version
50+
uses: astral-sh/setup-uv@v5
7451
with:
7552
python-version: "3.12"
53+
enable-cache: true
54+
cache-dependency-glob: "uv.lock"
7655

77-
- name: Install uv
78-
run: |
79-
curl -LsSf https://astral.sh/uv/install.sh | sh
80-
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
56+
- name: Install the project
57+
run: uv sync --locked --all-extras --dev
8158

8259
- name: Run lint checks
83-
run: ./dev/lint.sh
60+
run: make lint

.gitignore

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -120,3 +120,8 @@ a.md
120120

121121
# Ignore a gunicorn config file
122122
gunicorn.conf.py
123+
124+
# ignore version file
125+
src/databricks/labs/mcp/_version.py
126+
127+
.ruff_cache/

CONTRIBUTING.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,10 @@ before sending a pull request. For smaller changes (e.g. fixing a bug, adding a
1010

1111
### Running tests
1212

13-
First, install test requirements from within the directory of the server you're working on:
13+
First, install all requirements from within the directory of the server you're working on:
1414

1515
```bash
1616
uv sync
17-
uv pip install --group dev
1817
```
1918

2019
To run tests:
@@ -25,7 +24,8 @@ uv run pytest tests
2524

2625
To run and fix lint errors, run the following from the repo root directory:
2726
```bash
28-
./dev/lint.sh --fix
27+
make fmt # to format code
28+
make lint # to run linters
2929
```
3030

3131
### Guidelines for MCP servers

Makefile

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
fmt:
2+
uv run black .
3+
uv run ruff check . --fix
4+
5+
lint:
6+
uv run black . --check
7+
uv run ruff check .

README.md

Lines changed: 40 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,25 +2,45 @@
22

33
![Test Status](https://github.com/databrickslabs/mcp/actions/workflows/unit_tests.yml/badge.svg)
44

5+
Table of Contents
6+
=================
7+
8+
- [Databricks MCP servers](#databricks-mcp-servers)
9+
- [Table of Contents](#table-of-contents)
10+
- [Overview](#overview)
11+
- [Unity Catalog Server](#unity-catalog-server)
12+
- [Overview](#overview-1)
13+
- [Usage](#usage)
14+
- [Supported tools](#supported-tools)
15+
- [Developer Tools Server](#developer-tools-server)
16+
- [Support](#support)
17+
- [Contributing](#contributing)
18+
519
## Overview
620
An experimental collection of [MCP](https://modelcontextprotocol.io/introduction) servers to help AI agents fetch enterprise data from Databricks, automate common developer actions on Databricks, etc:
721

822
* ![status: Beta](https://img.shields.io/badge/status-Beta-yellow?style=flat-square&logo=databricks)
923
[Databricks Unity Catalog server](./servers/unity_catalog/README.md): Fetch data and run tools registered in Unity Catalog, making agents aware of your enterprise data
1024
* ![status: Under construction](https://img.shields.io/badge/status-Under_construction-red?style=flat-square&logo=databricks)
11-
[Databricks developer tools server](./servers/developer_tools/README.md): Perform common developer actions in Databricks, like creating and updating notebooks, running jobs, etc. This server is not yet usable, but contributions are welcome!
25+
[Databricks developer tools server](): Perform common developer actions in Databricks, like creating and updating notebooks, running jobs, etc. This server is not yet usable, but contributions are welcome!
1226

1327
The set of servers and tools in this repo is fluid and will evolve over time. We welcome contributions to this repo - please first
1428
read the [contributor guidelines](CONTRIBUTING.md) to streamline the process and discover areas where help is needed.
1529

16-
## Usage
17-
See the `README.md` in each server's directory for detailed instructions.
18-
For most servers, the following steps work:
30+
## Unity Catalog Server
31+
32+
![status: Beta](https://img.shields.io/badge/status-Beta-yellow?style=flat-square&logo=databricks)
33+
34+
### Overview
35+
A Model Context Protocol server that exposes structured and unstructured data in Unity Catalog ([vector search indexes](https://docs.databricks.com/gcp/en/generative-ai/vector-search), [functions](https://docs.databricks.com/aws/en/generative-ai/agent-framework/create-custom-tool), and [Genie spaces](https://docs.databricks.com/aws/en/genie/)), as tools.
36+
37+
<img src="docs/images/demo.png" alt="Demo image" height="400px">
1938

39+
### Usage
2040
1. Install [uv](https://docs.astral.sh/uv/getting-started/installation/)
2141
1. Install Python using `uv python install 3.12`
2242
1. [Configure Databricks credentials](https://docs.databricks.com/aws/en/dev-tools/cli/authentication) with access to the required APIs
23-
1. Add the server to your MCP client configuration. For example, to use the Databricks Unity Catalog MCP server with Claude Desktop, add the following to your `claude_desktop_config.json`:
43+
1. Add the server to your MCP client configuration. For example, to use this server with Claude Desktop, add the following to your `claude_desktop_config.json`:
2444

2545
```json
2646
{
@@ -29,7 +49,7 @@ For most servers, the following steps work:
2949
"command": "/path/to/uv/executable/uv",
3050
"args": [
3151
"--directory",
32-
"/path/to/this/repo/servers/unity_catalog",
52+
"/path/to/this/repo",
3353
"run",
3454
"unitycatalog-mcp",
3555
"-s",
@@ -42,6 +62,20 @@ For most servers, the following steps work:
4262
}
4363
```
4464

65+
### Supported tools
66+
67+
The list of tools supported by this server is dynamically inferred at startup time based on the functions and vector search indexes
68+
within the specified Unity Catalog schema, as well as any specified Genie spaces. In particular, the server exposes
69+
the following tools:
70+
71+
* **UC Functions**: for each UC function, the server exposes a tool with the same name, arguments, and return type as the function
72+
* **Vector search indexes**: for each vector search index, the server exposes a tool for querying that vector search index
73+
* **Genie spaces**: for each Genie space, the server exposes tools for managing conversations and sending questions to the space
74+
75+
## Developer Tools Server
76+
77+
This server is currently under construction. It is not yet usable, but contributions are welcome!
78+
4579
## Support
4680
Please note that all projects in the `databrickslabs` GitHub organization are provided for your exploration only, and are not formally supported by Databricks with Service Level Agreements (SLAs). They are provided AS-IS and we do not make any guarantees of any kind. Please do not submit a support ticket relating to any issues arising from the use of these projects.
4781

dev/lint.sh

Lines changed: 0 additions & 33 deletions
This file was deleted.
File renamed without changes.

pyproject.toml

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
[project]
2+
3+
name = "databricks-labs-mcp"
4+
dynamic = ["version", "readme"]
5+
description = "Databricks MCP servers"
6+
authors = [{ name = "renardeinside", email = "[email protected]" }]
7+
requires-python = ">=3.10"
8+
keywords = ["databricks", "unity catalog", "mcp", "agents", "llm", "automation", "genie"]
9+
10+
dependencies = [
11+
"mcp>=1.2.1",
12+
"pydantic>=2.10.6",
13+
"pydantic-settings>=2.7.1",
14+
"unitycatalog-ai>=0.1.0",
15+
"databricks-sdk>=0.49.0",
16+
"databricks-openai>=0.3.1",
17+
]
18+
license-files = ["LICENSE", "NOTICE"]
19+
20+
[tool.uv]
21+
dev-dependencies = [
22+
"black>=25.1.0",
23+
"pyright>=1.1.393",
24+
"ruff>=0.9.4",
25+
"pytest>=8.3.4",
26+
"isort>=6.0.1",
27+
]
28+
29+
[project.scripts]
30+
unitycatalog-mcp = "databricks.labs.mcp.servers.unity_catalog:main"
31+
32+
[build-system]
33+
requires = ["hatchling", "hatch-fancy-pypi-readme", "hatch-vcs"]
34+
build-backend = "hatchling.build"
35+
36+
# for compatibility with databricks.labs.* layout
37+
[tool.hatch.build]
38+
sources = ["src"]
39+
include = ["src"]
40+
41+
42+
[tool.hatch.version]
43+
source = "vcs"
44+
45+
[tool.hatch.build.hooks.vcs]
46+
version-file = "src/databricks/labs/mcp/_version.py"
47+
48+
49+
[tool.hatch.metadata.hooks.fancy-pypi-readme]
50+
content-type = "text/markdown"
51+
52+
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
53+
path = "README.md"
54+
55+
[[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]]
56+
# replace relative links with absolute links
57+
pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)'
58+
replacement = '[\1](https://github.com/databrickslabs/mcp/tree/main/\g<2>)'
59+
60+
[[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]]
61+
# replace relative image links with absolute links
62+
pattern = '<img (.*?)src="((?!https?://)\S+?)"(.*?)>'
63+
replacement = '<img \1src="https://raw.githubusercontent.com/databrickslabs/mcp/refs/heads/main/\g<2>"\g<3>>'
64+
65+
66+
[tool.pytest.ini_options]
67+
testpaths = ["tests"]
68+
python_files = "test_*.py"
69+
python_classes = "Test*"
70+
python_functions = "test_*"

servers/unity_catalog/LICENSE

Lines changed: 0 additions & 23 deletions
This file was deleted.

0 commit comments

Comments
 (0)