Skip to content

Commit 5167bbb

Browse files
committed
Fixed supervisor skills. Enhanced re-routing. Fixed custom skills loading.
1 parent 9a4afdc commit 5167bbb

File tree

1 file changed

+80
-11
lines changed

1 file changed

+80
-11
lines changed

vector_mcp/utils.py

Lines changed: 80 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -5,22 +5,27 @@
55
import httpx
66
import pickle
77
import yaml
8-
from typing import Optional
9-
8+
import logging
109
from pathlib import Path
11-
from typing import Any, Union, List
10+
from typing import Union, List, Any, Optional
1211
import json
1312
from importlib.resources import files, as_file
1413
from pydantic_ai.models.openai import OpenAIChatModel
15-
from pydantic_ai.models.anthropic import AnthropicModel
1614
from pydantic_ai.models.google import GoogleModel
1715
from pydantic_ai.models.huggingface import HuggingFaceModel
1816
from pydantic_ai.models.groq import GroqModel
1917
from pydantic_ai.models.mistral import MistralModel
2018
from fasta2a import Skill
2119

20+
from llama_index.core.embeddings import BaseEmbedding
21+
from llama_index.embeddings.openai import OpenAIEmbedding
22+
2223
try:
24+
from llama_index.embeddings.ollama import OllamaEmbedding
25+
except ImportError:
26+
OllamaEmbedding = None
2327

28+
try:
2429
from openai import AsyncOpenAI
2530
from pydantic_ai.providers.openai import OpenAIProvider
2631
except ImportError:
@@ -42,20 +47,16 @@
4247
MistralProvider = None
4348

4449
try:
50+
from pydantic_ai.models.anthropic import AnthropicModel
4551
from anthropic import AsyncAnthropic
4652
from pydantic_ai.providers.anthropic import AnthropicProvider
4753
except ImportError:
54+
AnthropicModel = None
4855
AsyncAnthropic = None
4956
AnthropicProvider = None
5057

5158

52-
from llama_index.core.embeddings import BaseEmbedding
53-
from llama_index.embeddings.openai import OpenAIEmbedding
54-
55-
try:
56-
from llama_index.embeddings.ollama import OllamaEmbedding
57-
except ImportError:
58-
OllamaEmbedding = None
59+
logger = logging.getLogger(__name__)
5960

6061

6162
def to_integer(string: Union[str, int] = None) -> int:
@@ -373,6 +374,74 @@ def create_model(
373374
return OpenAIChatModel(model_name=model_id, provider="openai")
374375

375376

377+
def extract_tool_tags(tool_def: Any) -> List[str]:
    """
    Extract the list of tags from a tool definition object.

    Observed structure in debug:
        tool_def.name (str)
        tool_def.meta (dict) -> {'fastmcp': {'tags': ['tag']}}

    Several paths are probed, in order, to stay robust across wrappers:
        1. tool_def.meta['fastmcp']['tags'] (or '_fastmcp')
        2. tool_def.meta['tags']
        3. tool_def.metadata['tags'] (legacy/alternative wrapper)
        4. tool_def.metadata['meta']['fastmcp']['tags'] (nested path)
        5. tool_def.metadata['meta']['tags']
        6. tool_def.tags (plain attribute)

    Args:
        tool_def: Any tool definition object; missing attributes are tolerated.

    Returns:
        The first non-empty tag collection found, as a list, else [].
    """

    def _as_dict(obj: Any) -> dict:
        # Guard: intermediate containers may be None or a non-dict value;
        # treat anything that is not a dict as empty instead of crashing.
        return obj if isinstance(obj, dict) else {}

    meta = _as_dict(getattr(tool_def, "meta", None))
    metadata = _as_dict(getattr(tool_def, "metadata", None))
    meta_nested = _as_dict(metadata.get("meta"))

    # Candidate tag collections, in the same priority order as documented above.
    candidates = (
        _as_dict(meta.get("fastmcp") or meta.get("_fastmcp")).get("tags"),
        meta.get("tags"),
        metadata.get("tags"),
        _as_dict(meta_nested.get("fastmcp") or meta_nested.get("_fastmcp")).get("tags"),
        meta_nested.get("tags"),
        getattr(tool_def, "tags", None),
    )

    for tags in candidates:
        # Normalize to a list so the declared List[str] contract holds even
        # when the source stores tags as a set or tuple; skip non-collection
        # values (e.g. a bare string) so membership tests stay exact.
        if isinstance(tags, (list, tuple, set)) and tags:
            return list(tags)

    return []
425+
426+
427+
def tool_in_tag(tool_def: Any, tag: str) -> bool:
    """
    Check whether a tool definition carries the given tag.

    Args:
        tool_def: Any tool definition object accepted by extract_tool_tags.
        tag: The tag name to look for.

    Returns:
        True if *tag* is among the tool's extracted tags, else False.
    """
    # Membership test is already a bool; no if/else needed.
    return tag in extract_tool_tags(tool_def)
436+
437+
438+
def filter_tools_by_tag(tools: List[Any], tag: str) -> List[Any]:
    """
    Keep only the tools that carry the given tag.

    Args:
        tools: Tool definition objects to filter.
        tag: Tag name each kept tool must have.

    Returns:
        A new list containing the matching tools, in their original order.
    """
    return list(filter(lambda tool: tool_in_tag(tool, tag), tools))
443+
444+
376445
def get_embedding_model() -> BaseEmbedding:
377446
"""
378447
Get the embedding model based on environment variables.

0 commit comments

Comments
 (0)