
Commit e9aaa45

added readme. Fixed handle_fs_createDataset

1 parent 5ada5d3 · commit e9aaa45

File tree: 4 files changed, +191 -20 lines

src/teradata_mcp_server/tools/fs/fs_tools.py

Lines changed: 1 addition & 1 deletion
```diff
@@ -330,7 +330,7 @@ def handle_fs_getFeatures(conn: TeradataConnection, fs_config, *args, **kwargs):
 # conn (TeradataConnection) - Teradata connection object for executing SQL queries
 # db_name - the database name to check for existence
 # # Returns: True or False
-def handle_fs_createDataset(conn: TeradataConnection, fs_config, entity_name: str, feature_selection: str, dataset_name: str, target_database: str, *args, **kwargs):
+def handle_fs_createDataset(conn: TeradataConnection, fs_config, entity_name: str, feature_selection: list[str], dataset_name: str, target_database: str, *args, **kwargs):
     """
     Create a dataset using selected features and an entity from the feature store. The dataset is created in the specified target database under the given name. Requires a configured feature store and data domain. Registers the dataset in the catalog automatically. Use this when you want to build and register a new dataset for analysis or modeling.
     Args:
```
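With this fix, `feature_selection` is a list of feature names rather than a comma-separated string. A minimal call sketch, assuming an open `TeradataConnection` and an `fs_config` already configured via `fs_setFeatureStoreConfig` (the entity and feature names below are placeholders, not values from this commit):

```python
from teradata_mcp_server.tools.fs.fs_tools import handle_fs_createDataset

# conn: an open TeradataConnection; fs_config: feature store config with
# db_name and data_domain already set (both assumed available here).
result = handle_fs_createDataset(
    conn,
    fs_config,
    entity_name="customer_id",            # placeholder entity name
    feature_selection=["age", "income"],  # now list[str], no longer a CSV string
    dataset_name="test_efs_dataset",      # dataset name used by the test script
    target_database="demo_user",          # default database_name from the README
)
```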

test/EFS/Readme.md

Lines changed: 63 additions & 0 deletions
# EFS Test Script — Teradata Enterprise Feature Store

## Purpose
This script (`efs_mcp_test.py`) demonstrates and tests the Teradata MCP Server's functionality for the **Teradata Enterprise Feature Store** (EFS).

It runs through the following EFS functions (a minimal client sketch follows the list):
1. `fs_isFeatureStorePresent`
2. `fs_setFeatureStoreConfig`
3. `fs_getDataDomains`
4. `fs_getAvailableDatasets`
5. `fs_getAvailableEntities`
6. `fs_getFeatures`
7. `fs_createDataset`
17+
## Actions
18+
- **setup** – Create a demo feature store schema and load example features.
19+
- **test** – Call the EFS MCP tools above in sequence to verify functionality.
20+
- **cleanup** – Drop the demo feature store objects.
21+
22+
23+
## Requirements
24+
- Python 3.11+
25+
- Access to a Teradata system
26+
- MCP server exposing `fs_*` tools at `http://127.0.0.1:8001/mcp`
27+
- Python packages: `tdfs4ds`, `teradataml`, `langchain_mcp_adapters`
28+
29+
## Setup
30+
Create and activate a virtual environment, then install the required packages:
31+
32+
```bash
33+
python -m venv .venv
34+
source .venv/bin/activate # On Windows use: .venv\Scripts\activate
35+
pip install --upgrade pip
36+
pip install -r requirements.txt
37+
```
38+
39+
## Usage
40+
41+
Make sure that you have the Teradata MCP server with EFS tools running on `127.0.0.1:8001/mcp` eg.
42+
43+
```bash
44+
uv run teradata-mcp-server --profile dataScientist --mcp_transport streamable-http --mcp_port 8001
45+
```
46+
47+
Use the test script
48+
49+
```bash
50+
# Setup demo feature store in your schema
51+
python test/EFS/efs_mcp_test.py --action setup --database_uri "teradata://user:pass@host:1025/schema"
52+
53+
# Run the tests
54+
python test/EFS/efs_mcp_test.py --action test
55+
56+
# Clean up: drop the feature store
57+
python test/EFS/efs_mcp_test.py --action cleanup --database_uri "teradata://user:pass@host:1025/schema"
58+
```
59+
60+
## Notes
61+
- Defaults: `database_name = demo_user`, `data_domain = demo_dba`.
62+
- Dataset created in test: `test_dataset`.
63+
- `feature_selection` must be a string (e.g. `"col1,col2"`), not a list.
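
For reference, the dataset-creation input assembled by `efs_mcp_test.py` has roughly this shape (the entity and feature names here are placeholders; the script resolves the real values from the `fs_getAvailableEntities` and `fs_getFeatures` responses at runtime):

```python
# Sketch of the fs_createDataset tool input built by the test script.
create_payload = {
    "entity_name": "customer_id",               # placeholder; resolved from fs_getAvailableEntities
    "feature_selection": ["feat_1", "feat_2"],  # placeholders; names returned by fs_getFeatures
    "dataset_name": "test_efs_dataset",
    "target_database": "demo_user",             # default database_name
}
```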

test/EFS/efs_mcp_test.py

Lines changed: 123 additions & 17 deletions
```diff
@@ -19,17 +19,18 @@ def main():
     data_domain = 'demo_dba'
     connection_url = args.database_uri or os.getenv("DATABASE_URI")
 
-    if not connection_url:
-        raise ValueError("DATABASE_URI must be provided either as an argument or as an environment variable.")
+    if args.action in ['setup', 'cleanup']:
+        if not connection_url:
+            raise ValueError("DATABASE_URI must be provided either as an argument or as an environment variable.")
 
-    parsed_url = urlparse(connection_url)
-    user = parsed_url.username
-    password = parsed_url.password
-    host = parsed_url.hostname
-    port = parsed_url.port or 1025
-    database = parsed_url.path.lstrip('/') or user
+        parsed_url = urlparse(connection_url)
+        user = parsed_url.username
+        password = parsed_url.password
+        host = parsed_url.hostname
+        port = parsed_url.port or 1025
+        database = parsed_url.path.lstrip('/') or user
 
-    eng = create_context(host = host, username=user, password = password)
+        eng = create_context(host = host, username=user, password = password)
 
     if args.action=='setup':
         # Set up the feature store
@@ -64,18 +65,123 @@ def main():
                 "transport": "streamable_http"
             }
         })
-        async def _test(): # small async helper
+        async def runner():
             async with mcp_client.session("mcp_server") as mcp_session:
                 tools = await load_mcp_tools(mcp_session)
                 fs_tools = [t for t in tools if t.name.startswith('fs_')]
                 print("Available fs_ tools:", [t.name for t in fs_tools])
-                fs_set_tool = next((t for t in fs_tools if t.name == 'fs_setFeatureStoreConfig'), None)
-                if not fs_set_tool:
-                    raise RuntimeError('fs_setFeatureStoreConfig tool not found')
-                response = await fs_set_tool.arun({"data_domain": data_domain, "db_name": database_name})
-                print("fs_setFeatureStoreConfig response:", response)
-        import asyncio
-        asyncio.run(_test())
+
+                # Map tool names for quick access
+                tool_by_name = {t.name: t for t in fs_tools}
+
+                import json as _json
+
+                async def _call(name: str, payload: dict | None = None):
+                    if name not in tool_by_name:
+                        raise RuntimeError(f"Tool {name} not found")
+                    tool = tool_by_name[name]
+                    tool_input = payload or {}
+                    # StructuredTool expects a single positional/named argument: tool_input
+                    resp = await tool.arun(tool_input=tool_input)
+                    # Try to parse JSON text if needed
+                    if isinstance(resp, str):
+                        try:
+                            return _json.loads(resp)
+                        except Exception:
+                            return resp
+                    return resp
+
+                # 1) fs_isFeatureStorePresent
+                print("\n[1/8] fs_isFeatureStorePresent…")
+                r1 = await _call('fs_isFeatureStorePresent', {"db_name": database_name})
+                print("fs_isFeatureStorePresent →", r1)
+
+                # 2) fs_setFeatureStoreConfig
+                print("\n[2/8] fs_setFeatureStoreConfig…")
+                r_set = await _call('fs_setFeatureStoreConfig', {"db_name": database_name, "data_domain": data_domain})
+                print("fs_setFeatureStoreConfig →", r_set)
+
+                # 3) fs_getDataDomains
+                print("\n[3/8] fs_getDataDomains…")
+                r2 = await _call('fs_getDataDomains')
+                print("fs_getDataDomains →", r2)
+
+                # 4) fs_getAvailableDatasets
+                print("\n[4/8] fs_getAvailableDatasets…")
+                r3 = await _call('fs_getAvailableDatasets')
+                print("fs_getAvailableDatasets →", r3)
+
+                # 5) fs_getAvailableEntities
+                print("\n[5/8] fs_getAvailableEntities…")
+                r4 = await _call('fs_getAvailableEntities')
+                print("fs_getAvailableEntities →", r4)
+
+                # 6) fs_setFeatureStoreConfig (entity)
+                print("\n[6/8] fs_setFeatureStoreConfig (entity)…")
+                def _extract_entity_name(payload):
+                    res = payload.get("results") if isinstance(payload, dict) else payload
+                    if isinstance(res, list) and res and isinstance(res[0], dict):
+                        for key in ("ENTITY_NAME", "entity_name", "entity", "name"):
+                            if key in res[0] and res[0][key]:
+                                return res[0][key]
+                    if isinstance(res, str):
+                        lines = [ln.strip() for ln in res.splitlines() if ln.strip()]
+                        if lines:
+                            parts = lines[-1].split()
+                            if parts:
+                                return parts[-1]
+                    return None
+                entity_name = _extract_entity_name(r4) or "tablename"
+                r_set_entity = await _call('fs_setFeatureStoreConfig', {"entity": entity_name})
+                print("fs_setFeatureStoreConfig (entity) →", r_set_entity)
+
+                # 7) fs_getFeatures
+                print("\n[7/8] fs_getFeatures…")
+                r5 = await _call('fs_getFeatures')
+                print("fs_getFeatures →", r5)
+
+                # Extract feature names from r5
+                def _extract_feature_names(payload):
+                    # Accept either {"results": [...]} or a raw list
+                    items = payload.get("results") if isinstance(payload, dict) else payload
+                    if not isinstance(items, list):
+                        return []
+                    names = []
+                    for row in items:
+                        if not isinstance(row, dict):
+                            continue
+                        for key in ("feature_name", "FEATURE_NAME", "name", "FEATURE", "feature"):
+                            if key in row and row[key] is not None:
+                                names.append(row[key])
+                                break
+                    return names
+
+                feature_selection = _extract_feature_names(r5)
+                print(f"Extracted {len(feature_selection)} feature names for dataset creation")
+
+                # 8) fs_createDataset
+                print("\n[8/8] fs_createDataset…")
+                create_payload = {
+                    "entity_name": entity_name,
+                    "feature_selection": feature_selection,
+                    "dataset_name": "test_efs_dataset",
+                    "target_database": database_name,
+                }
+                print("fs_createDataset payload:", create_payload)
+                r6 = await _call('fs_createDataset', create_payload)
+
+                # If tool returned an error payload (not exception), also retry with CSV features
+                if isinstance(r6, dict) and isinstance(r6.get("results"), dict):
+                    err = r6["results"].get("error")
+                    if isinstance(err, str) and ("NoneType" in err or "string" in err):
+                        create_payload_retry = dict(create_payload)
+                        create_payload_retry["feature_selection"] = ",".join(feature_selection)
+                        print("Retrying fs_createDataset with CSV features (error payload):", create_payload_retry)
+                        r6 = await _call('fs_createDataset', create_payload_retry)
+
+                print("fs_createDataset →", r6)
+
+        asyncio.run(runner())
 
     elif args.action=='cleanup':
         list_of_tables = db_list_tables()
```

test/EFS/requirements.txt

Lines changed: 4 additions & 2 deletions
```diff
@@ -1,5 +1,7 @@
 tdfs4ds
 teradataml
 pandas
-json
-sqlalchemy
+sqlalchemy
+langchain_mcp_adapters
+mcp
+anyio
```
