Skip to content

Commit de57f6c

Browse files
committed
refactor: remove @microsoft/ai-chat-protocol dependency
1 parent dbaeaa6 commit de57f6c

File tree

10 files changed

+121
-43
lines changed

10 files changed

+121
-43
lines changed

package-lock.json

Lines changed: 0 additions & 25 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

packages/agent-api/package.json

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,6 @@
2626
"@langchain/langgraph": "^0.4.9",
2727
"@langchain/mcp-adapters": "^0.6.0",
2828
"@langchain/openai": "^0.6.13",
29-
"@microsoft/ai-chat-protocol": "^1.0.0-beta.20240814.1",
3029
"@opentelemetry/exporter-trace-otlp-http": "^0.205.0",
3130
"@opentelemetry/instrumentation": "^0.205.0",
3231
"@opentelemetry/sdk-trace-base": "^2.1.0",

packages/agent-api/src/functions/chats-post.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import { Readable } from 'node:stream';
22
import { randomUUID } from 'node:crypto';
33
import { HttpRequest, InvocationContext, HttpResponseInit, app } from '@azure/functions';
4-
import { AIChatCompletionRequest, AIChatCompletionDelta } from '@microsoft/ai-chat-protocol';
54
import { ChatOpenAI } from '@langchain/openai';
65
import { AzureCosmsosDBNoSQLChatMessageHistory } from '@langchain/azure-cosmosdb';
76
import { createReactAgent } from "@langchain/langgraph/prebuilt";
@@ -11,6 +10,7 @@ import { StreamEvent } from '@langchain/core/tracers/log_stream.js';
1110
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
1211
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
1312
import { getAzureOpenAiTokenProvider, getCredentials, getInternalUserId } from '../auth.js';
13+
import { type AIChatCompletionRequest, type AIChatCompletionDelta } from '../models.js';
1414

1515
const agentSystemPrompt = `## Role
1616
You are an expert assistant that helps users with managing burger orders. Use the provided tools to get the information you need and perform actions on behalf of the user.

packages/agent-api/src/models.ts

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,3 +4,34 @@ export type IntermediateStep = {
44
input?: string;
55
output?: string;
66
}
/**
 * Local chat-protocol types replacing the ones previously imported from
 * `@microsoft/ai-chat-protocol` (removed by this refactor). The shapes
 * mirror that package's wire format so existing callers keep working.
 */

/** Author of a chat message. */
export type AIChatRole = "user" | "assistant" | "system";

/**
 * Free-form contextual data attached to messages, completions and requests
 * (e.g. userId/sessionId as seen in the webapp caller).
 * NOTE(review): `Record<string, unknown>` would be stricter — confirm no
 * caller relies on `any`-typed access before tightening.
 */
export type AIChatContext = Record<string, any>;

/** A complete chat message. */
export type AIChatMessage = {
  role: AIChatRole;
  content: string;
  context?: AIChatContext;
}

/** A partial (streamed) chat message: every field may be absent in a delta. */
export type AIChatMessageDelta = {
  role?: AIChatRole;
  content?: string;
  context?: AIChatContext;
}

/** A full, non-streamed completion result. */
export type AIChatCompletion = {
  message: AIChatMessage;
  context?: AIChatContext;
}

/** One streamed chunk of a completion. */
export type AIChatCompletionDelta = {
  delta: AIChatMessageDelta;
  context?: AIChatContext;
}

/** Request body sent to the chat endpoint: conversation so far plus context. */
export type AIChatCompletionRequest = {
  messages: AIChatMessage[];
  context?: AIChatContext;
}

packages/agent-webapp/package.json

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
"author": "Microsoft",
2121
"license": "MIT",
2222
"dependencies": {
23-
"@microsoft/ai-chat-protocol": "^1.0.0-beta.20240814.1",
2423
"dompurify": "^3.0.0",
2524
"lit": "^3.0.0",
2625
"marked": "^16.2.1"

packages/agent-webapp/src/components/chat.ts

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@ import { map } from 'lit/directives/map.js';
33
import { repeat } from 'lit/directives/repeat.js';
44
import { unsafeSVG } from 'lit/directives/unsafe-svg.js';
55
import { customElement, property, state, query } from 'lit/decorators.js';
6-
import { type AIChatCompletionDelta, type AIChatMessage } from '@microsoft/ai-chat-protocol';
76
import { type ChatRequestOptions, getCompletion } from '../services/api.service.js';
7+
import { type AIChatMessage } from '../models.js';
88
import { type ParsedMessage, parseMessageIntoHtml } from '../message-parser.js';
99
import sendSvg from '../../assets/icons/send.svg?raw';
1010
import questionSvg from '../../assets/icons/question.svg?raw';
@@ -130,15 +130,14 @@ export class ChatComponent extends LitElement {
130130
this.isLoading = true;
131131
this.scrollToLastMessage();
132132
try {
133-
const response = getCompletion({
133+
const chunks = await getCompletion({
134134
...this.options,
135135
messages: this.messages,
136136
context: {
137137
userId: this.userId,
138138
sessionId: this.sessionId,
139139
},
140140
});
141-
const chunks = response as AsyncGenerator<AIChatCompletionDelta>;
142141
const { messages } = this;
143142
const message: AIChatMessage = {
144143
content: '',

packages/agent-webapp/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,4 @@ export * from './components/user-card.js';
66
export * from './components/debug.js';
77
export * from './components/history.js';
88
export * from './message-parser.js';
9+
export * from './models.js';

packages/agent-webapp/src/message-parser.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import { type HTMLTemplateResult, html } from 'lit';
22
import { unsafeHTML } from 'lit/directives/unsafe-html.js';
33
import { marked } from 'marked';
44
import DOMPurify from 'dompurify';
5-
import { type AIChatMessage } from '@microsoft/ai-chat-protocol';
5+
import { type AIChatMessage } from './models';
66

77
export type ParsedMessage = {
88
content: string;
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
/**
 * Local chat-protocol types replacing the ones previously imported from
 * `@microsoft/ai-chat-protocol` (removed by this refactor). The shapes
 * mirror that package's wire format so existing callers keep working.
 */

/** Author of a chat message. */
export type AIChatRole = "user" | "assistant" | "system";

/**
 * Free-form contextual data attached to messages and completions.
 * NOTE(review): `Record<string, unknown>` would be stricter — confirm no
 * caller relies on `any`-typed access before tightening.
 */
export type AIChatContext = Record<string, any>;

/** A complete chat message. */
export type AIChatMessage = {
  role: AIChatRole;
  content: string;
  context?: AIChatContext;
}

/** A partial (streamed) chat message: every field may be absent in a delta. */
export type AIChatMessageDelta = {
  role?: AIChatRole;
  content?: string;
  context?: AIChatContext;
}

/** A full, non-streamed completion result. */
export type AIChatCompletion = {
  message: AIChatMessage;
  context?: AIChatContext;
}

/** One streamed chunk of a completion. */
export type AIChatCompletionDelta = {
  delta: AIChatMessageDelta;
  context?: AIChatContext;
}
Lines changed: 60 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { AIChatMessage, AIChatCompletionDelta, AIChatProtocolClient } from '@microsoft/ai-chat-protocol';
1+
import { type AIChatMessage, type AIChatCompletionDelta } from '../models.js';
22

33
export const apiBaseUrl: string = import.meta.env.VITE_API_URL || '';
44

@@ -9,20 +9,69 @@ export type ChatRequestOptions = {
99
apiUrl: string;
1010
};
1111

12-
export async function* getCompletion(options: ChatRequestOptions) {
12+
export async function getCompletion(options: ChatRequestOptions) {
1313
const apiUrl = options.apiUrl || apiBaseUrl;
14-
const client = new AIChatProtocolClient(`${apiUrl}/api/chats`);
15-
const result = await client.getStreamedCompletion(options.messages, { context: options.context });
14+
const response = await fetch(`${apiUrl}/api/chats/stream`, {
15+
method: 'POST',
16+
headers: { 'Content-Type': 'application/json' },
17+
body: JSON.stringify({
18+
messages: options.messages,
19+
context: options.context || {},
20+
}),
21+
});
1622

17-
for await (const response of result) {
18-
if (!response.delta) {
19-
continue;
20-
}
23+
if (response.status > 299 || !response.ok) {
24+
let json: JSON | undefined;
25+
try {
26+
json = await response.json();
27+
} catch {}
2128

22-
yield new Promise<AIChatCompletionDelta>((resolve) => {
29+
const error = json?.['error'] ?? response.statusText;
30+
throw new Error(error);
31+
}
32+
33+
return getChunksFromResponse<AIChatCompletionDelta>(response, options.chunkIntervalMs);
34+
}
35+
36+
class NdJsonParserStream extends TransformStream<string, JSON> {
37+
private buffer = '';
38+
constructor() {
39+
let controller: TransformStreamDefaultController<JSON>;
40+
super({
41+
start(_controller) {
42+
controller = _controller;
43+
},
44+
transform: (chunk) => {
45+
const jsonChunks = chunk.split('\n').filter(Boolean);
46+
for (const jsonChunk of jsonChunks) {
47+
try {
48+
this.buffer += jsonChunk;
49+
controller.enqueue(JSON.parse(this.buffer));
50+
this.buffer = '';
51+
} catch {
52+
// Invalid JSON, wait for next chunk
53+
}
54+
}
55+
},
56+
});
57+
}
58+
}
59+
60+
export async function* getChunksFromResponse<T>(response: Response, intervalMs: number): AsyncGenerator<T, void> {
61+
const reader = response.body?.pipeThrough(new TextDecoderStream()).pipeThrough(new NdJsonParserStream()).getReader();
62+
if (!reader) {
63+
throw new Error('No response body or body is not readable');
64+
}
65+
66+
let value: JSON | undefined;
67+
let done: boolean;
68+
// eslint-disable-next-line no-await-in-loop
69+
while ((({ value, done } = await reader.read()), !done)) {
70+
const chunk = value as T;
71+
yield new Promise<T>((resolve) => {
2372
setTimeout(() => {
24-
resolve(response);
25-
}, options.chunkIntervalMs);
73+
resolve(chunk);
74+
}, intervalMs);
2675
});
2776
}
2877
}

0 commit comments

Comments
 (0)