
Commit 14b4951

Moved the EFS config setting method to the EFS tools and switched it to use the SQLAlchemy connection
1 parent 47a5b16 commit 14b4951

2 files changed: +66, -36 lines


src/teradata_mcp_server/server.py

Lines changed: 9 additions & 36 deletions

@@ -498,45 +498,18 @@ async def fs_setFeatureStoreConfig(
     data_domain: Optional[str] = None,
     db_name: Optional[str] = None,
     entity: Optional[str] = None,
-) -> ResponseType:
-    if db_name:
-        if tdfs4ds.connect(database=db_name):
-            logger.info(f"connected to the feature store of the {db_name} database")
-        # Reset data_domain if DB name changes
-        if not (fs_config.db_name and fs_config.db_name.upper() == db_name.upper()):
-            fs_config.data_domain = None
-
-        fs_config.db_name = db_name
-        logger.info(f"connected to the feature store of the {db_name} database")
-        fs_config.feature_catalog = f"{db_name}.{tdfs4ds.FEATURE_CATALOG_NAME_VIEW}"
-        logger.info(f"feature catalog {fs_config.feature_catalog}")
-        fs_config.process_catalog = f"{db_name}.{tdfs4ds.PROCESS_CATALOG_NAME_VIEW}"
-        logger.info(f"process catalog {fs_config.process_catalog}")
-        fs_config.dataset_catalog = f"{db_name}.FS_V_FS_DATASET_CATALOG"  # <- fixed line
-        logger.info(f"dataset catalog {fs_config.dataset_catalog}")
-
-    if fs_config.db_name is not None and data_domain is not None:
-        sql_query_ = f"SEL count(*) AS N FROM {fs_config.feature_catalog} WHERE UPPER(data_domain) = '{data_domain.upper()}'"
-        logger.info(f"{sql_query_}")
-        result = tdml.execute_sql(sql_query_)
-        logger.info(f"{result}")
-        if result.fetchall()[0][0] > 0:
-            fs_config.data_domain = data_domain
-        else:
-            fs_config.data_domain = None
-
-    if fs_config.db_name is not None and fs_config.data_domain is not None and entity is not None:
-        sql_query_ = f"SEL count(*) AS N FROM {fs_config.feature_catalog} WHERE UPPER(data_domain) = '{data_domain.upper()}' AND ENTITY_NAME = '{entity.upper()}'"
-        logger.info(f"{sql_query_}")
-        result = tdml.execute_sql(sql_query_)
-        logger.info(f"{result}")
-        if result.fetchall()[0][0] > 0:
-            fs_config.entity = entity
-    return format_text_response(f"Feature store config updated: {fs_config.dict(exclude_none=True)}")
+) -> td.FeatureStoreConfig:
+    with _tdconn.engine.connect() as conn:
+        return fs_config.fs_setFeatureStoreConfig(
+            conn=conn,
+            db_name=db_name,
+            data_domain=data_domain,
+            entity=entity,
+        )

 @mcp.tool(description="Display the current feature store configuration (database and data domain).")
 async def fs_getFeatureStoreConfig() -> ResponseType:
-    return format_text_response(f"Current feature store config: {fs_config.dict(exclude_none=True)}")
+    return format_text_response(f"Current feature store config: {fs_config.model_dump(exclude_none=True)}")

 #------------------ Main ------------------#
 # Main function to start the MCP server
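After this change the MCP tool body only opens a SQLAlchemy connection from the server's engine and delegates to the method that now lives on FeatureStoreConfig in fs_utils.py. A minimal usage sketch of the relocated method outside the server, assuming the package is importable from the src layout and that a Teradata engine plus an existing tdfs4ds session are available (the URL, credentials, and names below are placeholders, not part of this commit):

from sqlalchemy import create_engine
from teradata_mcp_server.tools.fs.fs_utils import FeatureStoreConfig

# Placeholder Teradata engine; inside the server this is _tdconn.engine.
engine = create_engine("teradatasql://user:password@host")

fs_config = FeatureStoreConfig()

# Same pattern as the refactored tool: open a short-lived connection and let
# the config object validate db_name / data_domain / entity against the catalogs.
with engine.connect() as conn:
    fs_config.fs_setFeatureStoreConfig(
        conn=conn,
        db_name="demo_fs_db",      # placeholder feature-store database
        data_domain="sales",       # placeholder data domain
        entity="customer_id",      # placeholder entity name
    )

print(fs_config.model_dump(exclude_none=True))

Keeping the SQL inside FeatureStoreConfig means server.py no longer needs tdml.execute_sql for this tool, and the connection lifetime is scoped to the with block.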

src/teradata_mcp_server/tools/fs/fs_utils.py

Lines changed: 57 additions & 0 deletions

@@ -1,5 +1,13 @@
 from typing import Any, List, Optional
+from typing import Optional
 from pydantic import Field, BaseModel
+import tdfs4ds
+import logging
+from sqlalchemy.engine import Connection
+from sqlalchemy import text
+
+
+logger = logging.getLogger("teradata_mcp_server")

 class FeatureStoreConfig(BaseModel):
     """
@@ -45,3 +53,52 @@ class FeatureStoreConfig(BaseModel):
             "Used to list and manage available datasets within the feature store."
         )
     )
+
+    def fs_setFeatureStoreConfig(
+        self,
+        conn: Connection,
+        db_name: Optional[str] = None,
+        data_domain: Optional[str] = None,
+        entity: Optional[str] = None,
+    ) -> "FeatureStoreConfig":
+        if db_name:
+            if tdfs4ds.connect(database=db_name):
+                logger.info(f"connected to the feature store of the {db_name} database")
+            # Reset data_domain if DB name changes
+            if not (self.db_name and self.db_name.upper() == db_name.upper()):
+                self.data_domain = None
+
+            self.db_name = db_name
+            logger.info(f"connected to the feature store of the {db_name} database")
+            self.feature_catalog = f"{db_name}.{tdfs4ds.FEATURE_CATALOG_NAME_VIEW}"
+            logger.info(f"feature catalog {self.feature_catalog}")
+            self.process_catalog = f"{db_name}.{tdfs4ds.PROCESS_CATALOG_NAME_VIEW}"
+            logger.info(f"process catalog {self.process_catalog}")
+            self.dataset_catalog = f"{db_name}.FS_V_FS_DATASET_CATALOG"  # <- fixed line
+            logger.info(f"dataset catalog {self.dataset_catalog}")
+
+        if self.db_name is not None and data_domain is not None:
+            stmt = text(
+                f"SELECT COUNT(*) AS N FROM {self.feature_catalog} "
+                "WHERE UPPER(data_domain)=:domain"
+            )
+            result = conn.execute(stmt, {"domain": data_domain.upper()})
+            count = result.scalar_one_or_none() or 0
+            logger.info("Found %d matching data_domain rows", count)
+            if count > 0:
+                self.data_domain = data_domain
+            else:
+                self.data_domain = None
+
+        if self.db_name is not None and self.data_domain is not None and entity is not None:
+            stmt = text(
+                f"SELECT COUNT(*) AS N FROM {self.feature_catalog} "
+                "WHERE UPPER(data_domain)=:domain "
+                "AND ENTITY_NAME=:entity"
+            )
+            result = conn.execute(stmt, {"domain": self.data_domain.upper(), "entity": entity.upper()})
+            count = result.scalar_one_or_none() or 0
+            logger.info("Found %d matching entity rows", count)
+            if count > 0:
+                self.entity = entity
+        return self
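Besides moving the logic, the new method replaces the old f-string SEL count(*) queries with sqlalchemy.text() statements and named bind parameters, so only the catalog identifier is interpolated and the user-supplied data_domain / entity values travel as parameters. A standalone sketch of that pattern, using an in-memory SQLite engine purely for illustration (the table and values are made up, not the real feature catalog):

from sqlalchemy import create_engine, text

# In-memory SQLite stands in for the Teradata feature catalog in this sketch.
engine = create_engine("sqlite://")

with engine.connect() as conn:
    conn.execute(text("CREATE TABLE feature_catalog (data_domain VARCHAR(100))"))
    conn.execute(text("INSERT INTO feature_catalog VALUES ('SALES')"))

    catalog = "feature_catalog"  # only the identifier is interpolated into the SQL
    stmt = text(
        f"SELECT COUNT(*) AS N FROM {catalog} "
        "WHERE UPPER(data_domain) = :domain"  # value bound as a named parameter
    )
    count = conn.execute(stmt, {"domain": "sales".upper()}).scalar_one_or_none() or 0
    print(count)  # 1 -> the domain exists, so the config would keep it

scalar_one_or_none() returns the single count (or None on an empty result), which is why the method can fall back to 0 with "or 0" instead of indexing into fetchall() as the old server-side code did.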
