
Commit cd972ba

Commit message: fixes
Author: Zvi Fried
Parent: c3d0f47


48 files changed (+988 / -1590 lines)
File renamed without changes.

src/mcp_as_a_judge/core/server_helpers.py

Lines changed: 4 additions & 1 deletion
@@ -10,7 +10,7 @@
 from mcp.server.fastmcp import Context
 from pydantic import BaseModel, Field

-from mcp_as_a_judge.constants import MAX_TOKENS
+from mcp_as_a_judge.core.constants import MAX_TOKENS
 from mcp_as_a_judge.core.logging_config import get_logger
 from mcp_as_a_judge.llm.llm_integration import load_llm_config_from_env
 from mcp_as_a_judge.messaging.llm_provider import llm_provider
@@ -228,3 +228,6 @@ def create_pydantic_model_from_fields(fields_dict: dict) -> type[BaseModel]:
     )

     return dynamic_elicitation_model
+
+
+# (Removed rule-based decision extraction and gating to keep HITL LLM-driven)
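For context on the second hunk: create_pydantic_model_from_fields builds the dynamic elicitation schema that the new elicitation providers consume. Its body is not part of this diff, so the following is only a minimal sketch of the general pattern, assuming pydantic's create_model and a simple {field_name: description} input; everything except the returned model variable name is hypothetical.

# Hedged sketch only -- not the actual server_helpers.py implementation.
# Assumes fields_dict maps field names to human-readable descriptions.
from pydantic import BaseModel, Field, create_model


def build_elicitation_model(fields_dict: dict[str, str]) -> type[BaseModel]:
    """Create a throwaway BaseModel subclass for user-input elicitation."""
    field_definitions = {
        name: (str, Field(description=description))
        for name, description in fields_dict.items()
    }
    dynamic_elicitation_model = create_model("DynamicElicitationModel", **field_definitions)
    return dynamic_elicitation_model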

src/mcp_as_a_judge/db/cleanup_service.py

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@
 from sqlalchemy import Engine, func
 from sqlmodel import Session, select

-from mcp_as_a_judge.constants import MAX_TOTAL_SESSIONS
+from mcp_as_a_judge.core.constants import MAX_TOTAL_SESSIONS
 from mcp_as_a_judge.core.logging_config import get_logger
 from mcp_as_a_judge.db.interface import ConversationRecord

src/mcp_as_a_judge/db/db_config.py

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 and configuration classes.
 """

-from mcp_as_a_judge.constants import (
+from mcp_as_a_judge.core.constants import (
     DATABASE_URL,
     MAX_SESSION_RECORDS,
     MAX_TOTAL_SESSIONS,

src/mcp_as_a_judge/db/dynamic_token_limits.py

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@

 from dataclasses import dataclass

-from mcp_as_a_judge.constants import MAX_CONTEXT_TOKENS, MAX_RESPONSE_TOKENS
+from mcp_as_a_judge.core.constants import MAX_CONTEXT_TOKENS, MAX_RESPONSE_TOKENS
 from mcp_as_a_judge.core.logging_config import get_logger

 # Set up logger

src/mcp_as_a_judge/db/providers/sqlite_provider.py

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@
 from sqlalchemy import create_engine, func
 from sqlmodel import Session, SQLModel, asc, desc, select

-from mcp_as_a_judge.constants import MAX_CONTEXT_TOKENS
+from mcp_as_a_judge.core.constants import MAX_CONTEXT_TOKENS
 from mcp_as_a_judge.core.logging_config import get_logger
 from mcp_as_a_judge.db.cleanup_service import ConversationCleanupService
 from mcp_as_a_judge.db.interface import ConversationHistoryDB, ConversationRecord

src/mcp_as_a_judge/db/token_utils.py

Lines changed: 1 addition & 1 deletion
@@ -182,7 +182,7 @@ async def filter_records_by_token_limit(
         return []

     # Use configured MAX_CONTEXT_TOKENS for filtering
-    from mcp_as_a_judge.constants import MAX_CONTEXT_TOKENS as _MAX
+    from mcp_as_a_judge.core.constants import MAX_CONTEXT_TOKENS as _MAX

     context_limit = _MAX

src/mcp_as_a_judge/elicitation/__init__.py (new file)

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
+"""
+Elicitation provider package.
+
+This package provides a factory pattern for creating elicitation providers
+that handle user input elicitation through various methods (MCP elicitation,
+fallback prompts, etc.).
+"""
+
+from mcp_as_a_judge.elicitation.factory import ElicitationProviderFactory, elicitation_provider
+from mcp_as_a_judge.elicitation.fallback_provider import FallbackElicitationProvider
+from mcp_as_a_judge.elicitation.interface import ElicitationProvider, ElicitationResult
+from mcp_as_a_judge.elicitation.mcp_provider import MCPElicitationProvider
+
+__all__ = [
+    "ElicitationProvider",
+    "ElicitationResult",
+    "MCPElicitationProvider",
+    "FallbackElicitationProvider",
+    "ElicitationProviderFactory",
+    "elicitation_provider",
+]
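The package __init__ re-exports the provider interfaces plus the shared elicitation_provider singleton, so callers can import from the package root. A small, illustrative usage of that public surface (the fallback-only factory line is just an example, not code from this commit):

# Illustrative only: importing the public API re-exported by the new package.
from mcp_as_a_judge.elicitation import (
    ElicitationProviderFactory,
    elicitation_provider,  # shared singleton defined in factory.py
)

# Most callers use the singleton; a separate factory can be built if MCP
# elicitation should never be attempted (hypothetical configuration).
fallback_only_factory = ElicitationProviderFactory(prefer_elicitation=False)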

src/mcp_as_a_judge/elicitation/factory.py (new file)

Lines changed: 84 additions & 0 deletions
@@ -0,0 +1,84 @@
+"""
+Elicitation provider factory for smart provider selection.
+
+This module implements the factory pattern for creating the appropriate
+elicitation provider based on capabilities and preferences. It automatically
+detects MCP elicitation capability and selects the best available provider.
+"""
+
+from mcp.server.fastmcp import Context
+from pydantic import BaseModel
+
+from mcp_as_a_judge.elicitation.fallback_provider import FallbackElicitationProvider
+from mcp_as_a_judge.elicitation.interface import ElicitationResult
+from mcp_as_a_judge.elicitation.mcp_provider import MCPElicitationProvider
+
+
+class ElicitationProviderFactory:
+    """
+    Unified elicitation provider that automatically selects the best available method.
+
+    Similar to the messaging provider, this checks for elicitation capability and
+    provides appropriate fallbacks when not available.
+    """
+
+    def __init__(self, prefer_elicitation: bool = True):
+        """
+        Initialize the elicitation provider factory.
+
+        Args:
+            prefer_elicitation: Whether to prefer MCP elicitation when available
+        """
+        self.prefer_elicitation = prefer_elicitation
+        self._mcp_provider = MCPElicitationProvider()
+        self._fallback_provider = FallbackElicitationProvider()
+
+    async def elicit_user_input(
+        self, message: str, schema: type[BaseModel], ctx: Context
+    ) -> ElicitationResult:
+        """
+        Elicit user input using the best available method.
+
+        Args:
+            message: Message to display to the user
+            schema: Pydantic model schema defining expected fields
+            ctx: MCP context
+
+        Returns:
+            ElicitationResult with success status and data/message
+        """
+
+        # Check if MCP elicitation is available and preferred
+        if self.prefer_elicitation and self._mcp_provider.check_capability(ctx):
+            result = await self._mcp_provider.elicit(message, schema, ctx)
+
+            # If MCP elicitation succeeds, return the result
+            if result.success:
+                return result
+
+        # Use fallback provider
+        return await self._fallback_provider.elicit(message, schema, ctx)
+
+    def get_available_providers(self, ctx: Context) -> dict[str, dict[str, object]]:
+        """Get information about all available providers.
+
+        Args:
+            ctx: MCP context for capability checking
+
+        Returns:
+            Dictionary with provider availability information
+        """
+        return {
+            "mcp_elicitation": {
+                "available": self._mcp_provider.check_capability(ctx),
+                "provider_type": self._mcp_provider.provider_type,
+            },
+            "fallback_elicitation": {
+                "available": True,  # Always available
+                "provider_type": self._fallback_provider.provider_type,
+            }
+        }
+
+
+# Global elicitation provider instance
+elicitation_provider = ElicitationProviderFactory(prefer_elicitation=True)
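A caller-side sketch of how the factory is meant to be used from a tool handler. Only elicitation_provider.elicit_user_input and the ElicitationResult fields named in the docstrings above come from this commit; the gather_clarification function and ClarificationInput schema below are made-up examples.

# Hypothetical caller; ClarificationInput and gather_clarification are examples only.
from mcp.server.fastmcp import Context
from pydantic import BaseModel, Field

from mcp_as_a_judge.elicitation import elicitation_provider


class ClarificationInput(BaseModel):
    missing_requirement: str = Field(description="Requirement the user still needs to provide")


async def gather_clarification(ctx: Context) -> str:
    result = await elicitation_provider.elicit_user_input(
        message="Please provide the missing requirement.",
        schema=ClarificationInput,
        ctx=ctx,
    )
    if result.success:
        # MCP elicitation path: structured data came back from the client
        # (field names follow the ElicitationResult docstring: success/data/message).
        return str(result.data)
    # Fallback path: relay the generated prompt so the AI assistant asks the user.
    return result.message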

src/mcp_as_a_judge/elicitation/fallback_provider.py (new file)

Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
+"""
+Fallback elicitation provider that returns a message for the AI assistant.
+
+This provider generates a structured message that the AI assistant can use
+to prompt the user when MCP elicitation is not available.
+"""
+
+from mcp.server.fastmcp import Context
+from pydantic import BaseModel
+
+from mcp_as_a_judge.elicitation.interface import ElicitationProvider, ElicitationResult
+from mcp_as_a_judge.models import ElicitationFallbackUserVars
+from mcp_as_a_judge.prompting.loader import prompt_loader
+
+
+class FallbackElicitationProvider(ElicitationProvider):
+    """Fallback provider that returns a message for the AI assistant to prompt the user."""
+
+    @property
+    def provider_type(self) -> str:
+        """Return the provider type identifier."""
+        return "fallback_elicitation"
+
+    async def _elicit(
+        self, message: str, schema: type[BaseModel], ctx: Context
+    ) -> ElicitationResult:
+        """Generate a fallback message for the AI assistant to prompt the user.
+
+        Args:
+            message: Original message to display to the user
+            schema: Pydantic model schema defining expected fields
+            ctx: MCP context
+
+        Returns:
+            ElicitationResult with success=False and a formatted message
+        """
+
+        # Extract field information from the schema
+        required_fields = []
+        optional_fields = []
+
+        for field_name, field_info in schema.model_fields.items():
+            field_desc = field_info.description or field_name.replace("_", " ").title()
+
+            if field_info.is_required():
+                required_fields.append(f"- **{field_desc}**")
+            else:
+                optional_fields.append(f"- **{field_desc}**")
+
+        # Create template variables
+        template_vars = ElicitationFallbackUserVars(
+            original_message=message,
+            required_fields=required_fields,
+            optional_fields=optional_fields,
+        )
+
+        # Generate fallback message using prompt template
+        fallback_message = prompt_loader.render_prompt(
+            "user/elicitation_fallback.md",
+            **template_vars.model_dump(exclude_none=True),
+        )
+
+        return ElicitationResult(success=False, message=fallback_message)
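To make the field-splitting in _elicit concrete, here is a standalone illustration of how a schema's required and optional fields end up in the rendered fallback message. ExampleSchema is invented for the demo; the pydantic calls (model_fields, is_required, description) mirror the provider code above.

# Demo of the required/optional split used by FallbackElicitationProvider._elicit.
# ExampleSchema is hypothetical; the loop mirrors the provider's logic.
from pydantic import BaseModel, Field


class ExampleSchema(BaseModel):
    user_goal: str = Field(description="What the user wants to achieve")
    extra_notes: str | None = Field(default=None, description="Optional context")


required_fields: list[str] = []
optional_fields: list[str] = []
for field_name, field_info in ExampleSchema.model_fields.items():
    field_desc = field_info.description or field_name.replace("_", " ").title()
    if field_info.is_required():
        required_fields.append(f"- **{field_desc}**")
    else:
        optional_fields.append(f"- **{field_desc}**")

print(required_fields)  # ['- **What the user wants to achieve**']
print(optional_fields)  # ['- **Optional context**']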
