Skip to content

Commit f488198

Browse files
authored
πŸ“¦ NEW: Pipe v1 support (#121)
* πŸ“¦ NEW: Memory API v1 endpoints * πŸ“¦ NEW: Get provider function in core * πŸ“¦ NEW: Pipe run v1 support in core * πŸ“¦ NEW: Pipe run v1 endpoint * πŸ“¦ NEW: Get provider function * πŸ‘Œ IMPROVE: Pipe type version * πŸ‘Œ IMPROVE: Add tools to params function * πŸ‘Œ IMPROVE: Pipe v1 type * πŸ“¦ NEW: Pipe v1 run support in call llm function and utilities * πŸ‘Œ IMPROVE: Pipe v1 support in core * πŸ‘Œ IMPROVE: Base URL * πŸ‘Œ IMPROVE: Pipe type * πŸ“¦ NEW: Pipe tool and toolChoice zod schema * πŸ‘Œ IMPROVE: Pipe v1 run endpoint schema * πŸ‘Œ IMPROVE: Unused file * πŸ‘Œ IMPROVE: Model string * πŸ‘Œ IMPROVE: Pipe run endpoint in handleStreamResponse
1 parent 99362c8 commit f488198

28 files changed

+538
-220
lines changed

packages/baseai/src/deploy/index.ts

Lines changed: 13 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -566,7 +566,6 @@ export async function upsertMemory({
566566
docsToDelete?: string[];
567567
}): Promise<void> {
568568
const { createMemory } = getMemoryApiUrls({
569-
account,
570569
memoryName: memory.name
571570
});
572571

@@ -872,7 +871,6 @@ export async function listMemoryDocuments({
872871
memoryName: string;
873872
}) {
874873
const { listDocuments } = getMemoryApiUrls({
875-
account,
876874
memoryName: memoryName
877875
});
878876

@@ -901,8 +899,8 @@ export async function listMemoryDocuments({
901899
);
902900
}
903901

904-
const res = (await listResponse.json()) as { docs: { name: string }[] };
905-
const documents = res.docs.map((doc: { name: string }) => doc.name);
902+
const res = (await listResponse.json()) as { name: string }[];
903+
const documents = res.map((doc: { name: string }) => doc.name);
906904
return documents;
907905
}
908906

@@ -916,7 +914,6 @@ async function getSignedUploadUrl({
916914
account: Account;
917915
}): Promise<string> {
918916
const { uploadDocument } = getMemoryApiUrls({
919-
account,
920917
memoryName
921918
});
922919

@@ -968,7 +965,6 @@ async function deleteDocument({
968965
account: Account;
969966
}) {
970967
const { deleteDocument } = getMemoryApiUrls({
971-
account,
972968
memoryName,
973969
documentName
974970
});
@@ -1033,44 +1029,36 @@ async function uploadDocument(signedUrl: string, document: Blob) {
10331029
}
10341030

10351031
export function getMemoryApiUrls({
1036-
account,
10371032
memoryName,
10381033
documentName
10391034
}: {
1040-
account: Account;
10411035
memoryName: string;
10421036
documentName?: string;
10431037
}) {
1044-
const isOrgAccount = account.apiKey.includes(':');
1045-
const ownerLogin = isOrgAccount
1046-
? account.apiKey.split(':')[0]
1047-
: account.login;
1048-
const baseUrl = `https://api.langbase.com/beta`;
1049-
const baseUrlV1 = `https://api.langbase.com/v1`;
1038+
// Base URL
1039+
const baseUrl = `https://api.langbase.com/v1`;
10501040

10511041
// Create memory URL
1052-
const createUrlOrg = `${baseUrl}/org/${ownerLogin}/memorysets`;
1053-
const createUrlUser = `${baseUrl}/user/memorysets`;
1042+
const createMemory = `${baseUrl}/memory`;
1043+
1044+
// Delete memory URL
1045+
const deleteMemory = `${baseUrl}/memory/${memoryName}`;
10541046

10551047
// Upload document URL
1056-
const uploadDocumentOrg = `${baseUrl}/org/${ownerLogin}/memorysets/documents`;
1057-
const uploadDocumentUser = `${baseUrl}/user/memorysets/documents`;
1048+
const uploadDocument = `${baseUrl}/memory/documents`;
10581049

10591050
// List documents URL
1060-
const listDocuments = `${baseUrl}/memorysets/${ownerLogin}/${memoryName}/documents`;
1061-
1062-
// Delete memory URL
1063-
const deleteMemory = `${baseUrl}/memorysets/${ownerLogin}/${memoryName}`;
1051+
const listDocuments = `${baseUrl}/memory/${memoryName}/documents`;
10641052

10651053
// Delete document URL
1066-
const deleteDocument = `${baseUrlV1}/memory/${memoryName}/documents/${documentName}`;
1054+
const deleteDocument = `${baseUrl}/memory/${memoryName}/documents/${documentName}`;
10671055

10681056
return {
10691057
listDocuments,
10701058
deleteMemory,
10711059
deleteDocument,
1072-
createMemory: isOrgAccount ? createUrlOrg : createUrlUser,
1073-
uploadDocument: isOrgAccount ? uploadDocumentOrg : uploadDocumentUser
1060+
createMemory,
1061+
uploadDocument
10741062
};
10751063
}
10761064

@@ -1090,7 +1078,6 @@ async function overwriteMemory({
10901078
// Delete old memory.
10911079
dlog(`Deleting old memory: ${memory.name}`);
10921080
const { deleteMemory } = getMemoryApiUrls({
1093-
account,
10941081
memoryName: memory.name
10951082
});
10961083

packages/baseai/src/dev/index.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ import { customCors } from './middleware/custom-cors';
1313
import { poweredBy } from './middleware/powered-by';
1414
import { preFlight } from './middleware/pre-flight';
1515
import { registerRoot } from './routes/base';
16-
import { registerBetaPipesRun } from './routes/beta/pipes/run';
16+
import { registerV1PipesRun } from './routes/v1/pipes/run';
1717

1818
export async function runBaseServer() {
1919
const app = new Hono();
@@ -29,7 +29,7 @@ export async function runBaseServer() {
2929

3030
// Routes.
3131
registerRoot(app);
32-
registerBetaPipesRun(app);
32+
registerV1PipesRun(app);
3333

3434
const port = 9000;
3535

packages/baseai/src/dev/llms/call-anthropic.ts

Lines changed: 19 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import { handleProviderRequest } from '../utils/provider-handlers/provider-reque
44
import { ANTHROPIC } from '../data/models';
55
import { handleLlmError } from './utils';
66
import type { ModelParams } from 'types/providers';
7-
import type { Message } from 'types/pipe';
7+
import type { Message, Pipe } from 'types/pipe';
88
import { addToolsToParams } from '../utils/add-tools-to-params';
99

1010
export async function callAnthropic({
@@ -13,7 +13,7 @@ export async function callAnthropic({
1313
llmApiKey,
1414
stream
1515
}: {
16-
pipe: any;
16+
pipe: Pipe;
1717
llmApiKey: string;
1818
stream: boolean;
1919
messages: Message[];
@@ -44,14 +44,28 @@ export async function callAnthropic({
4444
}
4545

4646
function buildModelParams(
47-
pipe: any,
47+
pipe: Pipe,
4848
stream: boolean,
4949
messages: Message[]
5050
): ModelParams {
51+
const model = pipe.model.split(':')[1];
52+
const {
53+
top_p,
54+
max_tokens,
55+
temperature,
56+
presence_penalty,
57+
frequency_penalty,
58+
stop
59+
} = pipe;
5160
return {
5261
messages,
5362
stream,
54-
model: pipe.model.name,
55-
...pipe.model.params
63+
model,
64+
top_p,
65+
max_tokens,
66+
temperature,
67+
presence_penalty,
68+
frequency_penalty,
69+
stop
5670
};
5771
}

packages/baseai/src/dev/llms/call-cohere.ts

Lines changed: 19 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,15 @@ import { dlog } from '../utils/dlog';
44
import { COHERE } from '../data/models';
55
import { handleLlmError } from './utils';
66
import type { ModelParams } from 'types/providers';
7-
import type { Message } from 'types/pipe';
7+
import type { Message, Pipe } from 'types/pipe';
88

99
export async function callCohere({
1010
pipe,
1111
messages,
1212
llmApiKey,
1313
stream
1414
}: {
15-
pipe: any;
15+
pipe: Pipe;
1616
llmApiKey: string;
1717
messages: Message[];
1818
stream: boolean;
@@ -42,14 +42,28 @@ export async function callCohere({
4242
}
4343

4444
function buildModelParams(
45-
pipe: any,
45+
pipe: Pipe,
4646
stream: boolean,
4747
messages: Message[]
4848
): ModelParams {
49+
const model = pipe.model.split(':')[1];
50+
const {
51+
top_p,
52+
max_tokens,
53+
temperature,
54+
presence_penalty,
55+
frequency_penalty,
56+
stop
57+
} = pipe;
4958
return {
5059
messages,
5160
stream,
52-
model: pipe.model.name,
53-
...pipe.model.params
61+
model,
62+
top_p,
63+
max_tokens,
64+
temperature,
65+
presence_penalty,
66+
frequency_penalty,
67+
stop
5468
};
5569
}

packages/baseai/src/dev/llms/call-fireworks.ts

Lines changed: 23 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -5,15 +5,15 @@ import { FIREWORKS_AI } from '../data/models';
55

66
import { handleLlmError } from './utils';
77
import type { ModelParams } from 'types/providers';
8-
import type { Message } from 'types/pipe';
8+
import type { Message, Pipe } from 'types/pipe';
99

1010
export async function callFireworks({
1111
pipe,
1212
messages,
1313
llmApiKey,
1414
stream
1515
}: {
16-
pipe: any;
16+
pipe: Pipe;
1717
llmApiKey: string;
1818
stream: boolean;
1919
messages: Message[];
@@ -30,7 +30,7 @@ export async function callFireworks({
3030
dlog('Fireworks request params', transformedRequestParams);
3131

3232
// Fireworks llama-3.1 405b behaves weirdly with stop value. Bug on their side. Omitting it.
33-
if (pipe.model.name === 'llama-v3p1-405b-instruct')
33+
if (modelParams?.model === 'llama-v3p1-405b-instruct')
3434
delete transformedRequestParams['stop'];
3535

3636
const providerOptions = { provider: FIREWORKS_AI, llmApiKey };
@@ -47,19 +47,33 @@ export async function callFireworks({
4747
}
4848

4949
function buildModelParams(
50-
pipe: any,
50+
pipe: Pipe,
5151
stream: boolean,
5252
messages: Message[]
5353
): ModelParams {
5454
// Create model strings for Fireworks AI
55-
const modelString =
56-
pipe.model.name === 'yi-large'
55+
const pipeModel = pipe.model.split(':')[1];
56+
const model =
57+
pipeModel === 'yi-large'
5758
? 'accounts/yi-01-ai/models/yi-large'
58-
: `accounts/fireworks/models/${pipe.model.name}`;
59+
: `accounts/fireworks/models/${pipeModel}`;
60+
const {
61+
top_p,
62+
max_tokens,
63+
temperature,
64+
presence_penalty,
65+
frequency_penalty,
66+
stop
67+
} = pipe;
5968
return {
6069
messages,
6170
stream,
62-
model: modelString,
63-
...pipe.model.params
71+
model,
72+
top_p,
73+
max_tokens,
74+
temperature,
75+
presence_penalty,
76+
frequency_penalty,
77+
stop
6478
};
6579
}

packages/baseai/src/dev/llms/call-google.ts

Lines changed: 19 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import { handleProviderRequest } from '../utils/provider-handlers/provider-reque
44
import { GOOGLE } from '../data/models';
55
import { applyJsonModeIfEnabledForGoogle, handleLlmError } from './utils';
66
import type { ModelParams } from 'types/providers';
7-
import type { Message } from 'types/pipe';
7+
import type { Message, Pipe } from 'types/pipe';
88
import { addToolsToParams } from '../utils/add-tools-to-params';
99

1010
export async function callGoogle({
@@ -13,7 +13,7 @@ export async function callGoogle({
1313
llmApiKey,
1414
stream
1515
}: {
16-
pipe: any;
16+
pipe: Pipe;
1717
stream: boolean;
1818
llmApiKey: string;
1919
messages: Message[];
@@ -48,15 +48,29 @@ export async function callGoogle({
4848
}
4949

5050
function buildModelParams(
51-
pipe: any,
51+
pipe: Pipe,
5252
stream: boolean,
5353
messages: Message[]
5454
): ModelParams {
55+
const model = pipe.model.split(':')[1];
56+
const {
57+
top_p,
58+
max_tokens,
59+
temperature,
60+
presence_penalty,
61+
frequency_penalty,
62+
stop
63+
} = pipe;
5564
return {
5665
messages,
5766
stream,
58-
model: pipe.model.name,
59-
...pipe.model.params
67+
model,
68+
top_p,
69+
max_tokens,
70+
temperature,
71+
presence_penalty,
72+
frequency_penalty,
73+
stop
6074
};
6175
}
6276

packages/baseai/src/dev/llms/call-groq.ts

Lines changed: 19 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,15 @@ import { GROQ } from '../data/models';
44
import transformToProviderRequest from '../utils/provider-handlers/transfrom-to-provider-request';
55
import { applyJsonModeIfEnabled, handleLlmError } from './utils';
66
import type { ModelParams } from 'types/providers';
7-
import type { Message } from 'types/pipe';
7+
import type { Message, Pipe } from 'types/pipe';
88

99
export async function callGroq({
1010
pipe,
1111
messages,
1212
llmApiKey,
1313
stream
1414
}: {
15-
pipe: any;
15+
pipe: Pipe;
1616
llmApiKey: string;
1717
stream: boolean;
1818
messages: Message[];
@@ -42,14 +42,28 @@ export async function callGroq({
4242
}
4343

4444
function buildModelParams(
45-
pipe: any,
45+
pipe: Pipe,
4646
stream: boolean,
4747
messages: Message[]
4848
): ModelParams {
49+
const model = pipe.model.split(':')[1];
50+
const {
51+
top_p,
52+
max_tokens,
53+
temperature,
54+
presence_penalty,
55+
frequency_penalty,
56+
stop
57+
} = pipe;
4958
return {
5059
messages,
5160
stream,
52-
model: pipe.model.name,
53-
...pipe.model.params
61+
model,
62+
top_p,
63+
max_tokens,
64+
temperature,
65+
presence_penalty,
66+
frequency_penalty,
67+
stop
5468
};
5569
}

0 commit comments

Comments (0)