33import json
44import logging
55from asyncio .exceptions import CancelledError
6- from collections .abc import Callable
76from dataclasses import dataclass
87from typing import Any , cast
98
@@ -43,17 +42,24 @@ class FabricMCP(FastMCP[None]):
def __init__(self, log_level: str = "INFO"):
    """Initialize the MCP server and register the Fabric tool methods.

    Args:
        log_level: Logging level name to use for server logging.
    """
    super().__init__(f"Fabric MCP v{__version__}")
    self.logger = logging.getLogger(__name__)
    self.log_level = log_level

    # Load default model configuration from Fabric environment
    self._default_model: str | None = None
    self._default_vendor: str | None = None
    self._load_default_config()

    # Explicitly register each tool method with the FastMCP server.
    tool_methods = (
        self.fabric_list_patterns,
        self.fabric_get_pattern_details,
        self.fabric_run_pattern,
        self.fabric_list_models,
        self.fabric_list_strategies,
        self.fabric_get_configuration,
    )
    for tool_method in tool_methods:
        self.tool(tool_method)
5864 def _load_default_config (self ) -> None :
5965 """Load default model configuration from Fabric environment.
@@ -158,176 +164,154 @@ def _make_fabric_api_request(
158164 )
159165 ) from e
160166
def fabric_list_patterns(self) -> list[str]:
    """Return the names of all available Fabric patterns.

    Queries the Fabric REST API's pattern-name endpoint and filters the
    response down to string entries, logging a warning for anything else.

    Raises:
        McpError: If the API response is not a JSON list.
    """
    payload = self._make_fabric_api_request(
        "/patterns/names", operation="retrieving patterns"
    )

    # The endpoint must return a JSON array; anything else is a
    # malformed response from the Fabric API.
    if not isinstance(payload, list):
        raise McpError(
            ErrorData(
                code=-32603,  # Internal error
                message="Invalid response format from Fabric API: expected list",
            )
        )

    # Keep only string entries; record (but do not fail on) anything else.
    names: list[str] = []
    for entry in payload:  # type: ignore[misc]
        if isinstance(entry, str):
            names.append(entry)
            continue
        entry_any = cast(Any, entry)
        kind = type(entry_any).__name__ if entry_any is not None else "None"
        self.logger.warning("Non-string pattern name found: %s", kind)

    return names
def fabric_get_pattern_details(self, pattern_name: str) -> dict[str, str]:
    """Retrieve detailed information for a specific Fabric pattern.

    Args:
        pattern_name: Name of the pattern to look up.

    Returns:
        Mapping with 'name', 'description' and 'system_prompt' keys,
        translated from the Fabric API's capitalized field names.
    """
    raw = self._make_fabric_api_request(
        f"/patterns/{pattern_name}",
        pattern_name=pattern_name,
        operation="retrieving pattern details",
    )

    # Map the Fabric API's field names onto the MCP expected format.
    return {
        "name": raw.get("Name", ""),
        "description": raw.get("Description", ""),
        "system_prompt": raw.get("Pattern", ""),
    }
def fabric_run_pattern(
    self,
    pattern_name: str,
    input_text: str = "",
    stream: bool = False,  # Will be used later; pylint: disable=unused-argument
    config: PatternExecutionConfig | None = None,
) -> dict[Any, Any]:
    """
    Execute a Fabric pattern with input text and return complete output.

    This tool calls the Fabric API's /chat endpoint to execute a named pattern
    with the provided input text. Returns the complete LLM-generated output
    in a non-streaming manner (streaming parameter is ignored in this version).

    Args:
        pattern_name: The name of the fabric pattern to run (required).
        input_text: The input text to be processed by the pattern (optional).
        stream: Whether to stream the output (ignored, always non-streaming).
        config: Optional configuration for execution parameters.

    Returns:
        dict[Any, Any]: Contains 'output_format' and 'output_text' fields.

    Raises:
        McpError: For any API errors, connection issues, or parsing problems.
    """
    _ = stream  # TODO: #36 remove this later when streaming is implemented
    try:
        return self._execute_fabric_pattern(pattern_name, input_text, config)
    except RuntimeError as e:
        detail = str(e)
        # A 500 carrying a file-not-found message means the pattern itself
        # does not exist -> report invalid params rather than a server error.
        pattern_missing = (
            "Fabric API returned error 500" in detail
            and "no such file or directory" in detail
        )
        if pattern_missing:
            raise McpError(
                ErrorData(
                    code=-32602,  # Invalid params - pattern doesn't exist
                    message=f"Pattern '{pattern_name}' not found",
                )
            ) from e
        # Every other runtime failure (HTTP status error or otherwise)
        # surfaces as an internal error with the original message attached.
        raise McpError(
            ErrorData(
                code=-32603,  # Internal error
                message=f"Error executing pattern '{pattern_name}': {e}",
            )
        ) from e
    except ConnectionError as e:
        raise McpError(
            ErrorData(
                code=-32603,  # Internal error
                message=f"Error executing pattern '{pattern_name}': {e}",
            )
        ) from e
def fabric_list_models(self) -> dict[Any, Any]:
    """Retrieve configured Fabric models by vendor."""
    # This is a placeholder for the actual implementation
    vendor_models = {
        "openai": ["gpt-4o", "gpt-3.5-turbo"],
        "anthropic": ["claude-3-opus"],
    }
    return {
        "models": ["gpt-4o", "gpt-3.5-turbo", "claude-3-opus"],
        "vendors": vendor_models,
    }
def fabric_list_strategies(self) -> dict[Any, Any]:
    """Retrieve available Fabric strategies."""
    # This is a placeholder for the actual implementation
    strategies = [
        {
            "name": "default",
            "description": "Default strategy for pattern execution",
            "prompt": "Execute the pattern with default settings",
        },
        {
            "name": "creative",
            "description": "Creative strategy with higher temperature",
            "prompt": "Execute the pattern with creative parameters",
        },
    ]
    return {"strategies": strategies}
def fabric_get_configuration(self) -> dict[Any, Any]:
    """Retrieve Fabric configuration with sensitive values redacted."""
    # This is a placeholder for the actual implementation
    redacted = "[REDACTED_BY_MCP_SERVER]"
    return {
        "openai_api_key": redacted,
        "ollama_url": "http://localhost:11434",
        "anthropic_api_key": redacted,
        "fabric_config_dir": "~/.config/fabric",
    }
332316 def http_streamable (
333317 self ,
@@ -337,9 +321,7 @@ def http_streamable(
337321 ):
338322 """Run the MCP server with StreamableHttpTransport."""
339323 try :
340- self .mcp .run (
341- transport = "streamable-http" , host = host , port = port , path = mcp_path
342- )
324+ self .run (transport = "streamable-http" , host = host , port = port , path = mcp_path )
343325 except (KeyboardInterrupt , CancelledError , WouldBlock ) as e :
344326 # Handle graceful shutdown
345327 self .logger .debug ("Exception details: %s: %s" , type (e ).__name__ , e )
@@ -348,7 +330,7 @@ def http_streamable(
def stdio(self):
    """Run the MCP server on the default (stdio) transport."""
    try:
        self.run()
    except (KeyboardInterrupt, CancelledError, WouldBlock):
        # Graceful shutdown: a user interrupt is expected, not an error.
        self.logger.info("Server stopped by user.")
0 commit comments