Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/vertex-anthropic-provider.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'task-master-ai': minor
---

Add support for Vertex AI Anthropic provider to use Claude models through Google Vertex AI
21 changes: 21 additions & 0 deletions apps/cli/src/commands/models/custom-providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,27 @@ export const customProviderConfigs: Record<
return true;
}
},
VERTEX_ANTHROPIC: {
id: '__CUSTOM_VERTEX_ANTHROPIC__',
name: '* Custom Vertex Anthropic model',
provider: CUSTOM_PROVIDERS.VERTEX_ANTHROPIC,
promptMessage: (role) =>
`Enter the custom Vertex AI Anthropic Model ID for the ${role} role (e.g., claude-sonnet-4-6):`,
checkEnvVars: () => {
if (
!process.env.GOOGLE_API_KEY &&
!process.env.GOOGLE_APPLICATION_CREDENTIALS
) {
console.error(
chalk.red(
'Error: Either GOOGLE_API_KEY or GOOGLE_APPLICATION_CREDENTIALS environment variable is required. Please set one before using Vertex Anthropic models.'
)
);
return false;
}
return true;
}
},
LMSTUDIO: {
id: '__CUSTOM_LMSTUDIO__',
name: '* Custom LMStudio model',
Expand Down
1 change: 1 addition & 0 deletions apps/cli/src/commands/models/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ export const CUSTOM_PROVIDER_IDS = {
BEDROCK: '__CUSTOM_BEDROCK__',
AZURE: '__CUSTOM_AZURE__',
VERTEX: '__CUSTOM_VERTEX__',
VERTEX_ANTHROPIC: '__CUSTOM_VERTEX_ANTHROPIC__',
LMSTUDIO: '__CUSTOM_LMSTUDIO__',
OPENAI_COMPATIBLE: '__CUSTOM_OPENAI_COMPATIBLE__'
} as const;
Expand Down
1 change: 1 addition & 0 deletions packages/tm-core/src/common/constants/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ export type ValidatedProvider = (typeof VALIDATED_PROVIDERS)[number];
export const CUSTOM_PROVIDERS = {
AZURE: 'azure',
VERTEX: 'vertex',
VERTEX_ANTHROPIC: 'vertex-anthropic',
BEDROCK: 'bedrock',
OPENROUTER: 'openrouter',
OLLAMA: 'ollama',
Expand Down
9 changes: 7 additions & 2 deletions scripts/modules/ai-services-unified.js
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ import {
OpenRouterAIProvider,
PerplexityAIProvider,
VertexAIProvider,
VertexAnthropicProvider,
XAIProvider,
ZAICodingProvider,
ZAIProvider
Expand Down Expand Up @@ -81,6 +82,7 @@ const PROVIDERS = {
bedrock: new BedrockAIProvider(),
azure: new AzureProvider(),
vertex: new VertexAIProvider(),
'vertex-anthropic': new VertexAnthropicProvider(),
'claude-code': new ClaudeCodeProvider(),
'codex-cli': new CodexCliProvider(),
'gemini-cli': new GeminiCliProvider(),
Expand Down Expand Up @@ -627,8 +629,11 @@ async function _unifiedServiceRunner(serviceType, params) {
// Prepare provider-specific configuration
let providerSpecificParams = {};

// Handle Vertex AI specific configuration
if (providerName?.toLowerCase() === 'vertex') {
// Handle Vertex AI specific configuration (both vertex and vertex-anthropic)
if (
providerName?.toLowerCase() === 'vertex' ||
providerName?.toLowerCase() === 'vertex-anthropic'
) {
providerSpecificParams = _getVertexConfiguration(
effectiveProjectRoot,
session
Expand Down
7 changes: 6 additions & 1 deletion scripts/modules/task-manager/models.js
Original file line number Diff line number Diff line change
Expand Up @@ -583,6 +583,11 @@ async function setModel(role, modelId, options = {}) {
determinedProvider = CUSTOM_PROVIDERS.VERTEX;
warningMessage = `Warning: Custom Vertex AI model '${modelId}' set. Please ensure the model is valid and accessible in your Google Cloud project.`;
report('warn', warningMessage);
} else if (providerHint === CUSTOM_PROVIDERS.VERTEX_ANTHROPIC) {
// Set provider without model validation since Vertex Anthropic models are managed by Google Cloud
determinedProvider = CUSTOM_PROVIDERS.VERTEX_ANTHROPIC;
warningMessage = `Warning: Custom Vertex AI Anthropic model '${modelId}' set. Please ensure the model is valid and accessible in your Google Cloud project.`;
report('warn', warningMessage);
Comment on lines +586 to +590
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
rg -n -C3 '\bVERTEX_ANTHROPIC\b|vertex-anthropic' \
  scripts/modules/config-manager.js \
  scripts/modules/supported-models.json \
  scripts/modules/ai-services-unified.js \
  packages/tm-core/src/common/constants/providers.ts

Repository: eyaltoledano/claude-task-master

Length of output: 1990


🏁 Script executed:

#!/bin/bash
# Check config-manager.js for vertex-anthropic provider support
echo "=== Checking config-manager.js ==="
rg -n 'VALID_PROVIDERS|MODEL_MAP|keyMap|vertex-anthropic' scripts/modules/config-manager.js | head -50

# Check if vertex-anthropic is in supported-models.json
echo -e "\n=== Checking supported-models.json ==="
grep -n 'vertex-anthropic' scripts/modules/supported-models.json

Repository: eyaltoledano/claude-task-master

Length of output: 1228


🏁 Script executed:

#!/bin/bash
# View keyMap and surrounding context
echo "=== keyMap definition in config-manager.js ==="
sed -n '895,930p' scripts/modules/config-manager.js

# Check for VALID_PROVIDERS
echo -e "\n=== Checking for VALID_PROVIDERS ==="
rg -n 'VALID_PROVIDERS\s*=' scripts/modules/config-manager.js

# View supported-models.json structure
echo -e "\n=== supported-models.json structure (first 100 lines) ==="
head -100 scripts/modules/supported-models.json

Repository: eyaltoledano/claude-task-master

Length of output: 3425


🏁 Script executed:

#!/bin/bash
# Check full supported-models.json for vertex-anthropic
echo "=== Full search for vertex-anthropic in supported-models.json ==="
grep -c 'vertex-anthropic' scripts/modules/supported-models.json || echo "NOT FOUND"

# List all provider keys in supported-models.json
echo -e "\n=== All provider keys in supported-models.json ==="
grep -o '^\s*"[^"]*":' scripts/modules/supported-models.json | head -20

Repository: eyaltoledano/claude-task-master

Length of output: 528


🏁 Script executed:

#!/bin/bash
# Extract top-level provider keys from supported-models.json using jq
echo "=== Providers registered in supported-models.json ==="
jq 'keys' scripts/modules/supported-models.json

Repository: eyaltoledano/claude-task-master

Length of output: 353


Add vertex-anthropic to config-manager.js and supported-models.json registries.

Lines 586–590 allow users to set VERTEX_ANTHROPIC as the provider, but the integration is incomplete. The provider is missing from:

  1. keyMap in config-manager.js (lines 895–912) — isApiKeySet() and getMcpApiKeyStatus() will fail for this provider.
supported-models.json — getAvailableModels() will not discover vertex-anthropic models, and MODEL_MAP validation will not recognize the provider.

Users can persist the provider here but will encounter broken key-status checks and missing model metadata downstream. Add the provider to both registries with the appropriate API key mapping and model metadata.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@scripts/modules/task-manager/models.js` around lines 586 - 590, Add the
missing Vertex Anthropic provider to the registries: update the keyMap object in
config-manager.js to include an entry for CUSTOM_PROVIDERS.VERTEX_ANTHROPIC
mapping to the correct API key config so isApiKeySet() and getMcpApiKeyStatus()
can detect its key, and update supported-models.json to register
"vertex-anthropic" in MODEL_MAP and the list used by getAvailableModels() with
appropriate model metadata (display name, family, and any required fields) so
MODEL_MAP validation and model discovery recognize the provider.

} else if (providerHint === CUSTOM_PROVIDERS.GEMINI_CLI) {
// Gemini CLI provider - check if model exists in our list
determinedProvider = CUSTOM_PROVIDERS.GEMINI_CLI;
Expand Down Expand Up @@ -693,7 +698,7 @@ async function setModel(role, modelId, options = {}) {
success: false,
error: {
code: 'MODEL_NOT_FOUND_NO_HINT',
message: `Model ID "${modelId}" not found in Taskmaster's supported models. If this is a custom model, please specify the provider using --openrouter, --ollama, --bedrock, --azure, --vertex, --lmstudio, --openai-compatible, --gemini-cli, or --codex-cli.`
message: `Model ID "${modelId}" not found in Taskmaster's supported models. If this is a custom model, please specify the provider using --openrouter, --ollama, --bedrock, --azure, --vertex, --vertex-anthropic, --lmstudio, --openai-compatible, --gemini-cli, or --codex-cli.`
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Include --claude-code in the recovery hint.

setModel() still supports CUSTOM_PROVIDERS.CLAUDE_CODE, so this message is missing one valid way to disambiguate a custom model.

💡 Proposed fix
-						message: `Model ID "${modelId}" not found in Taskmaster's supported models. If this is a custom model, please specify the provider using --openrouter, --ollama, --bedrock, --azure, --vertex, --vertex-anthropic, --lmstudio, --openai-compatible, --gemini-cli, or --codex-cli.`
+						message: `Model ID "${modelId}" not found in Taskmaster's supported models. If this is a custom model, please specify the provider using --openrouter, --ollama, --bedrock, --azure, --vertex, --vertex-anthropic, --claude-code, --lmstudio, --openai-compatible, --gemini-cli, or --codex-cli.`
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@scripts/modules/task-manager/models.js` at line 701, The recovery hint for
unknown model IDs is missing the `--claude-code` option even though `setModel()`
supports CUSTOM_PROVIDERS.CLAUDE_CODE; update the message string (the template
that references modelId) to include `--claude-code` among the listed flags so
users can disambiguate custom Claude Code models.

Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

CLI flag --vertex-anthropic never defined, breaking advertised workflow

High Severity

The error message now tells users to specify --vertex-anthropic as a CLI flag, but this option is never defined in the Commander.js command setup in commands.js. There is no .option('--vertex-anthropic', ...) call, no entry in the providerFlags validation array, and no mapping in the providerHint ternary chain. Running task-master models --set-main claude-sonnet-4-6 --vertex-anthropic will fail because Commander doesn't recognize the flag. The interactive setup path works via custom-providers.ts, but the direct set workflow advertised in the PR description and error message is non-functional.

Fix in Cursor Fix in Web

}
};
}
Expand Down
59 changes: 59 additions & 0 deletions src/ai-providers/google-vertex-anthropic.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
/**
* google-vertex-anthropic.js
* AI provider implementation for Anthropic models on Google Vertex AI using Vercel AI SDK.
* This provider uses the createVertexAnthropic client to route requests to the
* publishers/anthropic endpoint instead of publishers/google.
*
* Extends VertexAIProvider — only the client factory and display name differ.
*/

import { createVertexAnthropic } from '@ai-sdk/google-vertex/anthropic';
import { VertexAIProvider } from './google-vertex.js';

export class VertexAnthropicProvider extends VertexAIProvider {
	constructor() {
		super();
		this.name = 'Google Vertex AI (Anthropic)';
	}

	/**
	 * Creates and returns a Google Vertex AI Anthropic client instance.
	 * Uses createVertexAnthropic so requests target the publishers/anthropic
	 * endpoint rather than publishers/google.
	 * @param {object} params - Parameters for client initialization
	 * @param {string} [params.apiKey] - Google API key
	 * @param {string} params.projectId - Google Cloud project ID
	 * @param {string} params.location - Google Cloud location (e.g., "us-central1")
	 * @param {object} [params.credentials] - Service account credentials object
	 * @param {string} [params.baseURL] - Optional custom API endpoint
	 * @returns {Function} Google Vertex AI Anthropic client function
	 * @throws {Error} If required parameters are missing or initialization fails
	 */
	getClient(params) {
		try {
			const { apiKey, projectId, location, credentials, baseURL } = params;
			const proxyFetch = this.createProxyFetch();

			// Assemble the client configuration incrementally, starting with the
			// project/location pair that every Vertex request requires.
			const clientConfig = {
				project: projectId,
				location
			};

			// Auth: an explicit API key (merged over any credentials) takes
			// precedence; otherwise fall back to service-account credentials alone.
			if (apiKey) {
				clientConfig.googleAuthOptions = { ...credentials, apiKey };
			} else if (credentials) {
				clientConfig.googleAuthOptions = credentials;
			}

			// Optional overrides: custom endpoint and proxy-aware fetch.
			if (baseURL) {
				clientConfig.baseURL = baseURL;
			}
			if (proxyFetch) {
				clientConfig.fetch = proxyFetch;
			}

			// Client routed to the publishers/anthropic endpoint.
			return createVertexAnthropic(clientConfig);
		} catch (error) {
			this.handleError('client initialization', error);
		}
	}
}
1 change: 0 additions & 1 deletion src/ai-providers/google-vertex.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
*/

import { createVertex } from '@ai-sdk/google-vertex';
import { resolveEnvVariable } from '../../scripts/modules/utils.js';
import { log } from '../../scripts/modules/utils.js';
import { BaseAIProvider } from './base-provider.js';

Expand Down
1 change: 1 addition & 0 deletions src/ai-providers/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ export { OllamaAIProvider } from './ollama.js';
export { BedrockAIProvider } from './bedrock.js';
export { AzureProvider } from './azure.js';
export { VertexAIProvider } from './google-vertex.js';
export { VertexAnthropicProvider } from './google-vertex-anthropic.js';
export { ClaudeCodeProvider } from './claude-code.js';
export { GeminiCliProvider } from './gemini-cli.js';
export { GrokCliProvider } from './grok-cli.js';
Expand Down
7 changes: 7 additions & 0 deletions tests/unit/ai-services-unified.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -231,6 +231,13 @@ jest.unstable_mockModule('../../src/ai-providers/index.js', () => ({
getRequiredApiKeyName: jest.fn(() => null),
isRequiredApiKey: jest.fn(() => false)
})),
VertexAnthropicProvider: jest.fn(() => ({
generateText: jest.fn(),
streamText: jest.fn(),
generateObject: jest.fn(),
getRequiredApiKeyName: jest.fn(() => null),
isRequiredApiKey: jest.fn(() => false)
})),
ClaudeCodeProvider: jest.fn(() => mockClaudeProvider),
GeminiCliProvider: jest.fn(() => ({
generateText: jest.fn(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ jest.unstable_mockModule('@tm/core', () => ({
CLAUDE_CODE: 'claude-code',
AZURE: 'azure',
VERTEX: 'vertex',
VERTEX_ANTHROPIC: 'vertex-anthropic',
GEMINI_CLI: 'gemini-cli',
CODEX_CLI: 'codex-cli',
OPENAI_COMPATIBLE: 'openai-compatible'
Expand Down
Loading