
Commit abaafb0

Merge branch 'main' into pangea-plugin
2 parents: 558d1af + b93034c

File tree

10 files changed: +350 / -398 lines

.github/workflows/link-checker.yml

Lines changed: 50 additions & 0 deletions
@@ -0,0 +1,50 @@
name: Check Markdown links

on:
  push:
    paths:
      - '**/*.md' # Only run when markdown files change
  pull_request:
    branches:
      - main
  schedule:
    - cron: '0 0 * * 0' # Run weekly on Sundays
  workflow_dispatch: # Allows manual triggering

jobs:
  linkChecker:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Link Checker
        uses: lycheeverse/[email protected]
        with:
          args: --verbose --no-progress --fail './**/*.md'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Create Issue If Failed
        if: failure()
        uses: actions/github-script@v6
        with:
          script: |
            const title = '🔗 Broken links found in documentation';
            const body = `The link checker found broken links in the documentation. Please check the [workflow run](${process.env.GITHUB_SERVER_URL}/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}) for details.`;

            const existingIssues = await github.rest.issues.listForRepo({
              owner: context.repo.owner,
              repo: context.repo.repo,
              labels: 'documentation,broken-links',
            });

            const issueExists = existingIssues.data.some(issue => issue.title === title);
            if (!issueExists) {
              await github.rest.issues.create({
                owner: context.repo.owner,
                repo: context.repo.repo,
                title: title,
                body: body,
                labels: ['documentation', 'broken-links']
              });
            }

README.md

Lines changed: 86 additions & 86 deletions
Large diffs are not rendered by default.

cookbook/monitoring-agents/CrewAI_with_Telemetry.ipynb

Lines changed: 157 additions & 312 deletions
Large diffs are not rendered by default.

src/globals.ts

Lines changed: 2 additions & 0 deletions
@@ -76,6 +76,7 @@ export const LAMBDA: string = 'lambda';
export const DASHSCOPE: string = 'dashscope';
export const X_AI: string = 'x-ai';
export const SAGEMAKER: string = 'sagemaker';
+export const NEBIUS: string = 'nebius';

export const VALID_PROVIDERS = [
  ANTHROPIC,
@@ -125,6 +126,7 @@ export const VALID_PROVIDERS = [
  DASHSCOPE,
  X_AI,
  SAGEMAKER,
+  NEBIUS,
];

export const CONTENT_TYPES = {

src/handlers/handlerUtils.ts

Lines changed: 2 additions & 0 deletions
@@ -786,6 +786,8 @@ export function constructConfigFromRequestHeaders(
      requestHeaders[`x-${POWERED_BY}-amzn-sagemaker-inference-component`],
    amznSagemakerSessionId:
      requestHeaders[`x-${POWERED_BY}-amzn-sagemaker-session-id`],
+    amznSagemakerModelName:
+      requestHeaders[`x-${POWERED_BY}-amzn-sagemaker-model-name`],
  };

  const workersAiConfig = {
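
A minimal sketch of how a caller might supply the new SageMaker model-name option through request headers. The 'x-portkey-*' prefix assumes POWERED_BY resolves to 'portkey', and the header values are hypothetical placeholders; neither is taken from this diff.

// Hedged sketch only: header names assume POWERED_BY === 'portkey'.
const requestHeaders: Record<string, string> = {
  'x-portkey-provider': 'sagemaker',
  'x-portkey-amzn-sagemaker-session-id': 'session-123', // existing option
  'x-portkey-amzn-sagemaker-model-name': 'my-endpoint-model', // new option (hypothetical value)
};

// constructConfigFromRequestHeaders(requestHeaders) would now surface the
// model name as amznSagemakerModelName alongside the existing session-id
// and inference-component options.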

src/providers/groq/chatComplete.ts

Lines changed: 4 additions & 0 deletions
@@ -54,6 +54,10 @@ export const GroqChatCompleteConfig: ProviderConfig = {
    max: 1,
    min: 1,
  },
+  tools: {
+    param: 'tools',
+    required: false,
+  },
};

export interface GroqChatCompleteResponse extends ChatCompletionResponse {}
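
A minimal sketch of the kind of OpenAI-style request body that the new mapping lets through for Groq; the model id and function schema below are illustrative assumptions, not taken from this diff.

// Illustrative request body; model id and tool schema are hypothetical.
const body = {
  model: 'llama3-70b-8192',
  messages: [{ role: 'user', content: 'What is the weather in Berlin?' }],
  tools: [
    {
      type: 'function',
      function: {
        name: 'get_weather',
        description: 'Look up the current weather for a city',
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    },
  ],
};
// With the new `tools` entry, GroqChatCompleteConfig forwards this field
// as-is instead of dropping it from the transformed request.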

src/providers/index.ts

Lines changed: 2 additions & 0 deletions
@@ -50,6 +50,7 @@ import { DashScopeConfig } from './dashscope';
import XAIConfig from './x-ai';
import QdrantConfig from './qdrant';
import SagemakerConfig from './sagemaker';
+import { NebiusConfig } from './nebius';

const Providers: { [key: string]: ProviderConfigs } = {
  openai: OpenAIConfig,
@@ -100,6 +101,7 @@ const Providers: { [key: string]: ProviderConfigs } = {
  'x-ai': XAIConfig,
  qdrant: QdrantConfig,
  sagemaker: SagemakerConfig,
+  nebius: NebiusConfig,
};

export default Providers;

src/providers/nebius/api.ts

Lines changed: 21 additions & 0 deletions
@@ -0,0 +1,21 @@
import { ProviderAPIConfig } from '../types';

export const nebiusAPIConfig: ProviderAPIConfig = {
  getBaseURL: () => 'https://api.studio.nebius.ai/v1',
  headers({ providerOptions }) {
    const { apiKey } = providerOptions;
    return { Authorization: `Bearer ${apiKey}` };
  },
  getEndpoint({ fn }) {
    switch (fn) {
      case 'chatComplete':
        return `/chat/completions`;
      case 'embed':
        return `/embeddings`;
      case 'complete':
        return '/completions';
      default:
        return '';
    }
  },
};
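
Assuming the gateway concatenates getBaseURL() with getEndpoint(), the chatComplete, embed, and complete routes resolve to https://api.studio.nebius.ai/v1/chat/completions, https://api.studio.nebius.ai/v1/embeddings, and https://api.studio.nebius.ai/v1/completions respectively; any other function falls through to the empty string.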

src/providers/nebius/index.ts

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
import { NEBIUS } from '../../globals';
import {
  chatCompleteParams,
  embedParams,
  completeParams,
  responseTransformers,
} from '../open-ai-base';
import { ProviderConfigs } from '../types';
import { nebiusAPIConfig } from './api';

export const NebiusConfig: ProviderConfigs = {
  chatComplete: chatCompleteParams([], {
    model: 'Qwen/Qwen2.5-72B-Instruct-fast',
  }),
  embed: embedParams([], { model: 'BAAI/bge-en-icl' }),
  complete: completeParams([], { model: 'Qwen/Qwen2.5-72B-Instruct-fast' }),
  api: nebiusAPIConfig,
  responseTransforms: responseTransformers(NEBIUS, {
    chatComplete: true,
    embed: true,
    complete: true,
  }),
};
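
A hedged end-to-end sketch of calling the gateway with the new provider. The local gateway URL, the 'x-portkey-provider' header (assuming POWERED_BY is 'portkey'), and the use of the Authorization header for the Nebius API key are assumptions for illustration; the model id is the default from NebiusConfig above.

// Assumptions: gateway running locally on port 8787, POWERED_BY === 'portkey'.
const response = await fetch('http://localhost:8787/v1/chat/completions', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    'x-portkey-provider': 'nebius',
    Authorization: `Bearer ${process.env.NEBIUS_API_KEY ?? ''}`,
  },
  body: JSON.stringify({
    model: 'Qwen/Qwen2.5-72B-Instruct-fast', // default model from NebiusConfig
    messages: [{ role: 'user', content: 'Say hello from Nebius.' }],
  }),
});
console.log(await response.json());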

src/providers/ollama/chatComplete.ts

Lines changed: 3 additions & 0 deletions
@@ -64,6 +64,9 @@ export const OllamaChatCompleteConfig: ProviderConfig = {
    default: 100,
    min: 0,
  },
+  tools: {
+    param: 'tools',
+  },
};

export interface OllamaChatCompleteResponse extends ChatCompletionResponse {
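
The same OpenAI-style tools payload sketched under the Groq change above applies here as well: with this mapping, Ollama's chat completion transform forwards `tools` unchanged rather than stripping it.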

0 commit comments
