@@ -204,7 +204,7 @@ mcpServers:
204204 DEBUG: "${env://DEBUG:-false}"
205205 LOG_LEVEL: "${env://LOG_LEVEL:-info}"
206206
207- model: "${env://MODEL:-anthropic:claude-sonnet-4-20250514}"
207+ model: "${env://MODEL:-anthropic/claude-sonnet-4-5-20250929}"
208208provider-api-key: "${env://OPENAI_API_KEY}" # Required - will fail if not set
209209```
210210
@@ -216,7 +216,7 @@ export OPENAI_API_KEY="your_openai_key"
216216
217217# Optionally override defaults
218218export DEBUG="true"
219- export MODEL="openai:gpt-4"
219+ export MODEL="openai/gpt-4"
220220
221221# Run mcphost
222222mcphost
@@ -584,7 +584,7 @@ mcpServers:
584584 type : " local"
585585 command : ["npx", "-y", "@modelcontextprotocol/server-filesystem", "${env://WORK_DIR:-/tmp}"]
586586
587- model: "${env://MODEL:-anthropic:claude-sonnet-4-20250514}"
587+ model: "${env://MODEL:-anthropic/claude-sonnet-4-5-20250929}"
588588---
589589Hello ${name:-World}! Please list ${repo_type:-public} repositories for user ${username}.
590590Working directory is ${env://WORK_DIR:-/tmp}.
@@ -723,7 +723,7 @@ mcphost -p "What is the weather like today?"
723723mcphost -p "What is 2+2?" --quiet
724724
725725# Use with different models
726- mcphost -m ollama:qwen2.5:3b -p "Explain quantum computing" --quiet
726+ mcphost -m ollama/qwen2.5:3b -p "Explain quantum computing" --quiet
727727```
728728
729729### Model Generation Parameters
@@ -751,24 +751,24 @@ These parameters work with all supported providers (OpenAI, Anthropic, Google, O
751751
752752### Available Models
753753Models can be specified using the `--model` (`-m`) flag:
754- - **Anthropic Claude** (default): `anthropic:claude-sonnet-4-20250514`, `anthropic:claude-3-5-sonnet-latest`, `anthropic:claude-3-5-haiku-latest`
755- - **OpenAI**: `openai:gpt-4`, `openai:gpt-4-turbo`, `openai:gpt-3.5-turbo`
756- - **Google Gemini**: `google:gemini-2.0-flash`, `google:gemini-1.5-pro`
757- - **Ollama models**: `ollama:llama3.2`, `ollama:qwen2.5:3b`, `ollama:mistral`
754+ - **Anthropic Claude** (default): `anthropic/claude-sonnet-4-5-20250929`, `anthropic/claude-3-5-sonnet-latest`, `anthropic/claude-3-5-haiku-latest`
755+ - **OpenAI**: `openai/gpt-4`, `openai/gpt-4-turbo`, `openai/gpt-3.5-turbo`
756+ - **Google Gemini**: `google/gemini-2.0-flash`, `google/gemini-1.5-pro`
757+ - **Ollama models**: `ollama/llama3.2`, `ollama/qwen2.5:3b`, `ollama/mistral`
758758- **OpenAI-compatible**: Any model via custom endpoint with `--provider-url`
759759
760760### Examples
761761
762762#### Interactive Mode
763763```bash
764764# Use Ollama with Qwen model
765- mcphost -m ollama:qwen2.5:3b
765+ mcphost -m ollama/qwen2.5:3b
766766
767767# Use OpenAI's GPT-4
768- mcphost -m openai:gpt-4
768+ mcphost -m openai/gpt-4
769769
770770# Use OpenAI-compatible model with custom URL and API key
771- mcphost --model openai:<your-model-name> \
771+ mcphost --model openai/<your-model-name> \
772772--provider-url <your-base-url> \
773773--provider-api-key <your-api-key>
774774```
@@ -800,7 +800,7 @@ mcphost -p "Generate a random UUID" --quiet | tr '[:lower:]' '[:upper:]'
800800- `--system-prompt string`: system-prompt file location
801801- `--debug`: Enable debug logging
802802- `--max-steps int`: Maximum number of agent steps (0 for unlimited, default: 0)
803- - `-m, --model string`: Model to use (format: provider:model) (default "anthropic:claude-sonnet-4-20250514")
803+ - `-m, --model string`: Model to use (format: provider/model) (default "anthropic/claude-sonnet-4-5-20250929")
804804- `-p, --prompt string`: **Run in non-interactive mode with the given prompt**
805805- `--quiet`: **Suppress all output except the AI response (only works with --prompt)**
806806- `--compact`: **Enable compact output mode without fancy styling (ideal for scripting and automation)**
@@ -845,7 +845,7 @@ mcpServers:
845845 url : " https://api.example.com/mcp"
846846
847847# Application settings
848- model: "anthropic:claude-sonnet-4-20250514"
848+ model: "anthropic/claude-sonnet-4-5-20250929"
849849max-steps : 20
850850debug : false
851851system-prompt : " /path/to/system-prompt.txt"
0 commit comments