Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,9 @@ jobs:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
LLAMA_BASE_URL: http://127.0.0.1:11434
LLAMA_MODEL: llama3.2:3b
AWS_BEARER_TOKEN_BEDROCK: ${{ secrets.AWS_BEARER_TOKEN_BEDROCK }}
GCP_VERTEX_API_KEY: ${{ secrets.GCP_VERTEX_API_KEY }}
AZURE_AI_API_KEY: ${{ secrets.AZURE_AI_API_KEY }}
85 changes: 85 additions & 0 deletions src/llms/cohere.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
import OpenAI from "openai";
import { LLMHandle, LLMToolResult, ToolDefinition } from "./types";
import {
createOpenAICompatibleTools,
parseOpenAICompatibleResponse,
} from "./utils.js";

// Optional generation parameters forwarded verbatim to Cohere's
// OpenAI-compatible chat completions endpoint.
// NOTE(review): field names follow Cohere's API (snake_case, `k`/`p`),
// not the OpenAI SDK's — they are spread into the request as-is.
export type CohereAIOptions = {
  temperature?: number;        // sampling temperature
  max_tokens?: number;         // cap on generated tokens
  stop_sequences?: string[];   // stop generation at any of these strings
  seed?: number;               // best-effort deterministic sampling
  frequency_penalty?: number;  // penalize frequent tokens
  presence_penalty?: number;   // penalize already-present tokens
  k?: number;                  // top-k sampling
  p?: number;                  // nucleus (top-p) sampling
};

// Configuration for creating a Cohere LLM handle.
export type CohereConfig = {
  apiKey: string;            // Cohere API key
  model: string;             // required — e.g. 'command-a-03-2025'
  baseUrl?: string;          // override endpoint; defaults to Cohere's OpenAI compatibility URL
  options?: CohereAIOptions; // extra generation parameters passed on every request
};

/**
 * Create an LLMHandle backed by Cohere's OpenAI-compatible Chat API.
 *
 * Uses the OpenAI SDK pointed at Cohere's compatibility endpoint
 * (https://docs.cohere.com/reference/chat) unless `cfg.baseUrl` overrides it.
 *
 * @param cfg - API key, model name, optional base URL and generation options.
 * @returns Handle exposing `gen`, `genWithTools`, and `genStream`.
 * @throws Error when `cfg.model` is missing.
 */
export function llmCohere(cfg: CohereConfig): LLMHandle {
  if (!cfg.model) {
    throw new Error(
      "llmCohere: Missing required 'model' parameter. " +
      "Please specify which Cohere model to use. " +
      // Fixed: example previously had an unbalanced parenthesis.
      "Example: llmCohere({ apiKey: '<YOUR-API-KEY>', model: 'command-a-03-2025' })"
    );
  }
  const model = cfg.model;
  const id = `Cohere-${model}`;
  // Default to Cohere's OpenAI Compatibility API without mutating the
  // caller-supplied config object (the original wrote back into `cfg`).
  // see: https://docs.cohere.com/reference/chat
  const baseUrl = cfg.baseUrl ?? "https://api.cohere.ai/compatibility/v1";
  const client = new OpenAI({ apiKey: cfg.apiKey, baseURL: baseUrl });
  const options = cfg.options ?? {};

  return {
    id,
    client,
    model,
    // Plain one-shot completion; returns empty string when no content.
    gen: async (prompt: string) => {
      const r = await client.chat.completions.create({
        model,
        messages: [{ role: "user", content: prompt }],
        ...options,
      });
      return r.choices?.[0]?.message?.content ?? "";
    },
    // Tool-calling completion: tool names are sanitized for the API and
    // mapped back to their original (dotted) names in the parsed result.
    genWithTools: async (prompt: string, tools: ToolDefinition[]): Promise<LLMToolResult> => {
      const { nameMap, formattedTools } = createOpenAICompatibleTools(tools);

      const r = await client.chat.completions.create({
        model,
        messages: [{ role: "user", content: prompt }],
        tools: formattedTools,
        tool_choice: "auto",
        ...options,
      });

      const message = r.choices?.[0]?.message;
      return parseOpenAICompatibleResponse(message, nameMap);
    },
    // Token streaming; yields only non-empty string deltas.
    genStream: async function* (prompt: string) {
      const stream = await client.chat.completions.create({
        model,
        messages: [{ role: "user", content: prompt }],
        stream: true,
        ...options,
      });
      // `as any`: the SDK's overload resolution with spread options does not
      // narrow the return to a Stream, but `stream: true` guarantees one.
      for await (const chunk of stream as any) {
        const delta = chunk?.choices?.[0]?.delta?.content;
        if (typeof delta === "string" && delta.length > 0) {
          yield delta;
        }
      }
    },
  };
}
1 change: 1 addition & 0 deletions src/volcano-sdk.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
export { llmBedrock } from "./llms/bedrock.js";
export { llmVertexStudio } from "./llms/vertex-studio.js";
export { llmAzure } from "./llms/azure.js";
export { llmCohere } from "./llms/cohere.js";
export { createVolcanoTelemetry, noopTelemetry } from "./telemetry.js";
export type { VolcanoTelemetryConfig, VolcanoTelemetry } from "./telemetry.js";
export type { OpenAIConfig, OpenAIOptions } from "./llms/openai.js";
Expand Down Expand Up @@ -74,7 +75,7 @@
const status = e?.status ?? e?.response?.status;
const requestId = e?.response?.headers?.get?.('x-request-id') || e?.id || e?.response?.data?.id;
const retryable = (status == null ? true : isRetryableStatus(status)) || !!e?.code?.toString?.()?.includes?.('ECONN') || !!e?.code?.toString?.()?.includes?.('ETIMEDOUT');
return new LLMError(e?.message || 'LLM error', { ...meta, requestId, retryable }, { cause: e });

Check failure on line 78 in src/volcano-sdk.ts

View workflow job for this annotation

GitHub Actions / build

tests/agent.token-streaming.e2e.test.ts > Token streaming e2e (live APIs) > validates handledByStep flag accurately in mixed scenarios

LLMError: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ normalizeError src/volcano-sdk.ts:78:12 ❯ doStep src/volcano-sdk.ts:2113:23 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.token-streaming.e2e.test.ts:148:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { meta: { stepId: +0, provider: 'llm:OpenAI-gpt-4o-mini', requestId: undefined, retryable: false } } Caused by: Caused by: Error: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ Function.generate node_modules/openai/core/error.mjs:44:20 ❯ OpenAI.makeStatusError node_modules/openai/client.mjs:162:32 ❯ OpenAI.makeRequest node_modules/openai/client.mjs:330:30 ❯ Object.genStream src/llms/openai.ts:92:22 ❯ executeLLMWithStreaming src/volcano-sdk.ts:715:22 ❯ doStep src/volcano-sdk.ts:2099:31 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.token-streaming.e2e.test.ts:148:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 401, headers: { constructor: 'Function<Headers>', append: 'Function<append>', delete: 'Function<delete>', get: 'Function<get>', has: 'Function<has>', set: 'Function<set>', getSetCookie: 'Function<getSetCookie>', keys: 'Function<keys>', values: 'Function<values>', entries: 'Function<entries>', forEach: 'Function<forEach>' }, requestID: 'req_8510eb921bf542d18d382a1f3bd8376d', error: { message: 'You didn\'t provide an API key. 
You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you\'re accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }

Check failure on line 78 in src/volcano-sdk.ts

View workflow job for this annotation

GitHub Actions / build

tests/agent.token-streaming.e2e.test.ts > Token streaming e2e (live APIs) > validates multi-step with OpenAI and Bedrock with mixed onToken callbacks

LLMError: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ normalizeError src/volcano-sdk.ts:78:12 ❯ doStep src/volcano-sdk.ts:2113:23 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.token-streaming.e2e.test.ts:91:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { meta: { stepId: +0, provider: 'llm:OpenAI-gpt-4o-mini', requestId: undefined, retryable: false } } Caused by: Caused by: Error: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ Function.generate node_modules/openai/core/error.mjs:44:20 ❯ OpenAI.makeStatusError node_modules/openai/client.mjs:162:32 ❯ OpenAI.makeRequest node_modules/openai/client.mjs:330:30 ❯ Object.genStream src/llms/openai.ts:92:22 ❯ executeLLMWithStreaming src/volcano-sdk.ts:715:22 ❯ doStep src/volcano-sdk.ts:2099:31 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.token-streaming.e2e.test.ts:91:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 401, headers: { constructor: 'Function<Headers>', append: 'Function<append>', delete: 'Function<delete>', get: 'Function<get>', has: 'Function<has>', set: 'Function<set>', getSetCookie: 'Function<getSetCookie>', keys: 'Function<keys>', values: 'Function<values>', entries: 'Function<entries>', forEach: 'Function<forEach>' }, requestID: 'req_de8ab2e083504b4ea757380404cfd1a6', error: { message: 'You didn\'t provide an API key. 
You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you\'re accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }

Check failure on line 78 in src/volcano-sdk.ts

View workflow job for this annotation

GitHub Actions / build

tests/agent.token-streaming.e2e.test.ts > Token streaming e2e (live APIs) > validates stream-level onToken with metadata using live OpenAI

LLMError: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ normalizeError src/volcano-sdk.ts:78:12 ❯ doStep src/volcano-sdk.ts:2113:23 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.token-streaming.e2e.test.ts:47:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { meta: { stepId: +0, provider: 'llm:OpenAI-gpt-4o-mini', requestId: undefined, retryable: false } } Caused by: Caused by: Error: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ Function.generate node_modules/openai/core/error.mjs:44:20 ❯ OpenAI.makeStatusError node_modules/openai/client.mjs:162:32 ❯ OpenAI.makeRequest node_modules/openai/client.mjs:330:30 ❯ Object.genStream src/llms/openai.ts:92:22 ❯ executeLLMWithStreaming src/volcano-sdk.ts:715:22 ❯ doStep src/volcano-sdk.ts:2099:31 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.token-streaming.e2e.test.ts:47:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 401, headers: { constructor: 'Function<Headers>', append: 'Function<append>', delete: 'Function<delete>', get: 'Function<get>', has: 'Function<has>', set: 'Function<set>', getSetCookie: 'Function<getSetCookie>', keys: 'Function<keys>', values: 'Function<values>', entries: 'Function<entries>', forEach: 'Function<forEach>' }, requestID: 'req_1a98881287a34c7a84fb953370b2ad39', error: { message: 'You didn\'t provide an API key. 
You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you\'re accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }

Check failure on line 78 in src/volcano-sdk.ts

View workflow job for this annotation

GitHub Actions / build

tests/agent.token-streaming.e2e.test.ts > Token streaming e2e (live APIs) > validates per-step onToken with live OpenAI

LLMError: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ normalizeError src/volcano-sdk.ts:78:12 ❯ doStep src/volcano-sdk.ts:1524:23 ❯ Object.run src/volcano-sdk.ts:1585:23 ❯ tests/agent.token-streaming.e2e.test.ts:14:21 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { meta: { stepId: +0, provider: 'llm:OpenAI-gpt-4o-mini', requestId: undefined, retryable: false } } Caused by: Caused by: Error: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ Function.generate node_modules/openai/core/error.mjs:44:20 ❯ OpenAI.makeStatusError node_modules/openai/client.mjs:162:32 ❯ OpenAI.makeRequest node_modules/openai/client.mjs:330:30 ❯ Object.genStream src/llms/openai.ts:92:22 ❯ executeLLMWithStreaming src/volcano-sdk.ts:715:22 ❯ doStep src/volcano-sdk.ts:1508:31 ❯ Object.run src/volcano-sdk.ts:1585:23 ❯ tests/agent.token-streaming.e2e.test.ts:14:21 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 401, headers: { constructor: 'Function<Headers>', append: 'Function<append>', delete: 'Function<delete>', get: 'Function<get>', has: 'Function<has>', set: 'Function<set>', getSetCookie: 'Function<getSetCookie>', keys: 'Function<keys>', values: 'Function<values>', entries: 'Function<entries>', forEach: 'Function<forEach>' }, requestID: 'req_6039d7e9adfd4a9b91a3ed25a3295ea8', error: { message: 'You didn\'t provide an API key. 
You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you\'re accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }

Check failure on line 78 in src/volcano-sdk.ts

View workflow job for this annotation

GitHub Actions / build

tests/agent.crews.e2e.test.ts > Multi-agent crews e2e (live APIs) > validates agent crews work with stream()

LLMError: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ normalizeError src/volcano-sdk.ts:78:12 ❯ doStep src/volcano-sdk.ts:1984:27 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.crews.e2e.test.ts:103:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { meta: { stepId: +0, provider: 'llm:OpenAI-gpt-4o-mini', requestId: undefined, retryable: false } } Caused by: Caused by: Error: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ Function.generate node_modules/openai/core/error.mjs:44:20 ❯ OpenAI.makeStatusError node_modules/openai/client.mjs:162:32 ❯ OpenAI.makeRequest node_modules/openai/client.mjs:330:30 ❯ Object.gen src/llms/openai.ts:70:17 ❯ doStep src/volcano-sdk.ts:1980:45 ❯ Object.stream src/volcano-sdk.ts:2174:23 ❯ tests/agent.crews.e2e.test.ts:103:22 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 401, headers: { constructor: 'Function<Headers>', append: 'Function<append>', delete: 'Function<delete>', get: 'Function<get>', has: 'Function<has>', set: 'Function<set>', getSetCookie: 'Function<getSetCookie>', keys: 'Function<keys>', values: 'Function<values>', entries: 'Function<entries>', forEach: 'Function<forEach>' }, requestID: 'req_ee99d4a60052412aae79b833cf06b7fa', error: { message: 'You didn\'t provide an API key. You need to provide your API key in an Authorization header using *** (i.e. 
Authorization: *** or as the password field (with blank username) if you\'re accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }

Check failure on line 78 in src/volcano-sdk.ts

View workflow job for this annotation

GitHub Actions / build

tests/agent.crews.e2e.test.ts > Multi-agent crews e2e (live APIs) > validates multi-step agent delegation with live OpenAI

LLMError: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ normalizeError src/volcano-sdk.ts:78:12 ❯ doStep src/volcano-sdk.ts:1385:27 ❯ Object.run src/volcano-sdk.ts:1585:23 ❯ tests/agent.crews.e2e.test.ts:71:21 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { meta: { stepId: +0, provider: 'llm:OpenAI-gpt-4o-mini', requestId: undefined, retryable: false } } Caused by: Caused by: Error: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ Function.generate node_modules/openai/core/error.mjs:44:20 ❯ OpenAI.makeStatusError node_modules/openai/client.mjs:162:32 ❯ OpenAI.makeRequest node_modules/openai/client.mjs:330:30 ❯ Object.gen src/llms/openai.ts:70:17 ❯ doStep src/volcano-sdk.ts:1381:45 ❯ Object.run src/volcano-sdk.ts:1585:23 ❯ tests/agent.crews.e2e.test.ts:71:21 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 401, headers: { constructor: 'Function<Headers>', append: 'Function<append>', delete: 'Function<delete>', get: 'Function<get>', has: 'Function<has>', set: 'Function<set>', getSetCookie: 'Function<getSetCookie>', keys: 'Function<keys>', values: 'Function<values>', entries: 'Function<entries>', forEach: 'Function<forEach>' }, requestID: 'req_797647e2b8064c0284ded1d14d32b822', error: { message: 'You didn\'t provide an API key. You need to provide your API key in an Authorization header using *** (i.e. 
Authorization: *** or as the password field (with blank username) if you\'re accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }

Check failure on line 78 in src/volcano-sdk.ts

View workflow job for this annotation

GitHub Actions / build

tests/agent.crews.e2e.test.ts > Multi-agent crews e2e (live APIs) > validates autonomous agent selection with live OpenAI

LLMError: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ normalizeError src/volcano-sdk.ts:78:12 ❯ doStep src/volcano-sdk.ts:1385:27 ❯ Object.run src/volcano-sdk.ts:1585:23 ❯ tests/agent.crews.e2e.test.ts:28:21 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { meta: { stepId: +0, provider: 'llm:OpenAI-gpt-4o-mini', requestId: undefined, retryable: false } } Caused by: Caused by: Error: 401 You didn't provide an API key. You need to provide your API key in an Authorization header using *** (i.e. Authorization: *** or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys. ❯ Function.generate node_modules/openai/core/error.mjs:44:20 ❯ OpenAI.makeStatusError node_modules/openai/client.mjs:162:32 ❯ OpenAI.makeRequest node_modules/openai/client.mjs:330:30 ❯ Object.gen src/llms/openai.ts:70:17 ❯ doStep src/volcano-sdk.ts:1381:45 ❯ Object.run src/volcano-sdk.ts:1585:23 ❯ tests/agent.crews.e2e.test.ts:28:21 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 401, headers: { constructor: 'Function<Headers>', append: 'Function<append>', delete: 'Function<delete>', get: 'Function<get>', has: 'Function<has>', set: 'Function<set>', getSetCookie: 'Function<getSetCookie>', keys: 'Function<keys>', values: 'Function<values>', entries: 'Function<entries>', forEach: 'Function<forEach>' }, requestID: 'req_7cf62cfeda8b4dcdb76d5422ed889e7c', error: { message: 'You didn\'t provide an API key. You need to provide your API key in an Authorization header using *** (i.e. 
Authorization: *** or as the password field (with blank username) if you\'re accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }
}
if (kind === 'mcp-conn') {
const retryable = true;
Expand Down
53 changes: 53 additions & 0 deletions tests/llms/cohere.integration.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import { describe, it, expect } from 'vitest';
import { llmCohere } from '../../dist/volcano-sdk.js';

// Live integration checks against the Cohere API.
// Each test builds its own handle and requires COHERE_API_KEY at runtime.
describe('Cohere provider (integration)', () => {
  // Construct a Cohere handle from the environment, failing fast
  // when the API key is not configured.
  const makeHandle = () => {
    if (!process.env.COHERE_API_KEY) {
      throw new Error('COHERE_API_KEY is required for this test');
    }
    return llmCohere({
      apiKey: process.env.COHERE_API_KEY!,
      model: process.env.COHERE_MODEL || 'command-a-03-2025',
    });
  };

  it('calls Cohere and returns at least one tool call when tools are provided', async () => {
    const handle = makeHandle();
    const toolDefs: any = [{
      name: 'astro.get_sign',
      description: 'Return sign for birthdate',
      parameters: { type: 'object', properties: { birthdate: { type: 'string' } }, required: ['birthdate'] },
    }];
    const result = await handle.genWithTools('Find the astrological sign for 1993-07-11 using available tools.', toolDefs);
    expect(Array.isArray(result.toolCalls)).toBe(true);
  }, 60000);

  it('streams tokens that concatenate to the non-stream answer', async () => {
    const handle = makeHandle();
    const prompt = 'Reply ONLY with STREAM_OK';
    const normalize = (s: string) => s.trim().replace(/[^A-Za-z0-9_]/g, '').toUpperCase();

    const whole = normalize(await handle.gen(prompt));

    const pieces: string[] = [];
    for await (const token of handle.genStream(prompt)) {
      pieces.push(token);
    }
    const joined = normalize(pieces.join(''));

    // Proper property: streaming concat should equal non-stream output (normalized)
    expect(whole).toBe(joined);
    expect(whole.length).toBeGreaterThan(0);
  }, 30000);

  it('can produce valid JSON when asked', async () => {
    const handle = makeHandle();
    const reply = await handle.gen('Return ONLY valid minified JSON: {"ok":true,"provider":"cohere"}');
    const trimmed = reply.trim();
    // Strip any surrounding code fences / prose by grabbing the outermost braces.
    const braces = trimmed.match(/\{[\s\S]*\}/);
    const parsed = JSON.parse(braces ? braces[0] : trimmed);
    expect(parsed && parsed.ok === true && parsed.provider === 'cohere').toBe(true);
  }, 30000);
});
54 changes: 54 additions & 0 deletions tests/llms/cohere.unit.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { llmCohere } from '../../dist/volcano-sdk.js';

// Captures every request payload passed to the mocked OpenAI client so
// assertions can inspect exactly what llmCohere sent.
const calls: any[] = [];

// Replace the `openai` module with a stub client. NOTE(review): vitest
// hoists vi.mock calls, so this mock is installed before llmCohere's
// module-under-test imports OpenAI — keep this statement order intact.
vi.mock('openai', () => {
  class MockOpenAI {
    chat = {
      completions: {
        create: async (req: any) => {
          // Record the raw request for later inspection.
          calls.push(req);
          return {
            choices: [
              {
                message: {
                  content: null,
                  tool_calls: [
                    // Return the sanitized version of the input tool name
                    { id: 't1', function: { name: 'mcp_abc123_get_sign', arguments: '{"birthdate":"1993-07-11"}' } }
                  ]
                }
              }
            ]
          } as any;
        }
      }
    };
    constructor(_: any) {}
  }
  return { default: MockOpenAI };
});

describe('Cohere provider (unit)', () => {
  // Reset captured requests between tests.
  beforeEach(() => { calls.length = 0; });

  it('sanitizes tool names and maps back to dotted names', async () => {
    const llm: any = llmCohere({ apiKey: 'sk-test', model: 'command-a-03-2025' });
    const tools = [
      {
        name: 'mcp_abc123.get_sign', // Hash-based ID format
        description: 'Get sign',
        parameters: { type: 'object', properties: { birthdate: { type: 'string' } } }
      }
    ];

    const res = await llm.genWithTools('Do task', tools as any);

    expect(calls.length).toBe(1);
    // Sanitized: dots become underscores for Cohere
    expect(calls[0].tools[0].function.name).toBe('mcp_abc123_get_sign');
    // Result: maps back to original dotted name
    expect(res.toolCalls[0].name).toBe('mcp_abc123.get_sign');
  });
});
Loading