
Commit d9d88af

chore: cleanup comments, expand examples
1 parent 7106fc0 commit d9d88af

File tree

3 files changed (+73, −16 lines):

fern/chat/non-streaming.mdx
fern/chat/openai-compatibility.mdx
fern/chat/streaming.mdx


fern/chat/non-streaming.mdx

Lines changed: 1 addition & 2 deletions
@@ -216,7 +216,7 @@ We'll build a help desk system for "TechFlow" that processes support messages th
   });

   const chat = await response.json();
-  lastChatId = chat.id; // Save for next message
+  lastChatId = chat.id;
   return chat.output[0].content;
 }

@@ -295,7 +295,6 @@ We'll build a help desk system for "TechFlow" that processes support messages th
   };
 }

-// Create specialized handlers
 const technicalSupport = await createSpecializedChat(
   "You are a technical support specialist. Ask clarifying questions and provide step-by-step troubleshooting."
 );

fern/chat/openai-compatibility.mdx

Lines changed: 63 additions & 9 deletions
@@ -225,18 +225,39 @@ We'll migrate "TechFlow's" existing OpenAI-powered customer support chat to use

   let fullResponse = '';

-  for await (const event of stream) {
-    if (event.type === 'response.output_text.delta') {
-      process.stdout.write(event.delta);
-      fullResponse += event.delta;
+  const reader = stream.body?.getReader();
+  if (!reader) return fullResponse;
+
+  const decoder = new TextDecoder();
+
+  while (true) {
+    const { done, value } = await reader.read();
+    if (done) break;
+
+    const chunk = decoder.decode(value);
+
+    // Parse and process SSE events
+    const lines = chunk.split('\n').filter(line => line.trim());
+    for (const line of lines) {
+      if (line.startsWith('data: ')) {
+        try {
+          const event = JSON.parse(line.slice(6));
+          if (event.path && event.delta) {
+            process.stdout.write(event.delta);
+            fullResponse += event.delta;
+          }
+        } catch (e) {
+          console.error('Invalid JSON line:', line);
+          continue;
+        }
+      }
     }
   }

   console.log('\n\nComplete response received.');
   return fullResponse;
 }

-// Test streaming
 streamWithVapi('Write a detailed explanation of REST APIs');
 ```
 </Step>
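The new example reads the raw fetch body and decodes SSE `data:` lines, accumulating `delta` text from events that carry a `path`. For reference, here is a minimal sketch of that parsing loop factored into a standalone helper, assuming the same `{ path, delta }` event shape shown in the diff; the helper name and `onDelta` callback are illustrative, not part of the docs:

```typescript
// Minimal sketch of the SSE-parsing loop from the updated example, as a reusable helper.
// The { path, delta } event shape comes from the diff; names here are illustrative.
async function readSSEDeltas(
  stream: Response,
  onDelta: (delta: string) => void
): Promise<string> {
  let fullResponse = '';
  const reader = stream.body?.getReader();
  if (!reader) return fullResponse;

  const decoder = new TextDecoder();

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    // Each chunk may contain several "data: {...}" lines
    const lines = decoder.decode(value).split('\n').filter(line => line.trim());
    for (const line of lines) {
      if (!line.startsWith('data: ')) continue;
      try {
        const event = JSON.parse(line.slice(6));
        if (event.path && event.delta) {
          onDelta(event.delta);
          fullResponse += event.delta;
        }
      } catch {
        // Skip malformed JSON lines rather than aborting the stream
        continue;
      }
    }
  }

  return fullResponse;
}
```

Like the example in the diff, this decodes each chunk independently; a hardened parser would buffer partial lines across reads, since an SSE event can be split between chunks.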
@@ -267,7 +288,7 @@ We'll migrate "TechFlow's" existing OpenAI-powered customer support chat to use
       return response.output[0].content[0].text;
     }

-    return response; // Return stream for streaming requests
+    return response;
   }

   return { sendMessage };
@@ -391,8 +412,22 @@ We'll migrate "TechFlow's" existing OpenAI-powered customer support chat to use
     if (done) break;

     const chunk = decoder.decode(value);
-    // Process SSE events here...
-    console.log(chunk);
+
+    // Parse and process SSE events
+    const lines = chunk.split('\n').filter(line => line.trim());
+    for (const line of lines) {
+      if (line.startsWith('data: ')) {
+        try {
+          const event = JSON.parse(line.slice(6));
+          if (event.path && event.delta) {
+            process.stdout.write(event.delta);
+          }
+        } catch (e) {
+          console.error('Invalid JSON line:', line);
+          continue;
+        }
+      }
+    }
   }
 }
@@ -440,8 +475,27 @@ We'll migrate "TechFlow's" existing OpenAI-powered customer support chat to use

     if (stream) {
       res.setHeader('Content-Type', 'text/event-stream');
+      res.setHeader('Cache-Control', 'no-cache');
+      res.setHeader('Connection', 'keep-alive');
+
       const reader = response.body?.getReader();
-      // Handle streaming response...
+      if (!reader) {
+        return res.status(500).json({ error: 'Failed to get stream reader' });
+      }
+
+      const decoder = new TextDecoder();
+
+      while (true) {
+        const { done, value } = await reader.read();
+        if (done) {
+          res.write('data: [DONE]\n\n');
+          res.end();
+          break;
+        }
+
+        const chunk = decoder.decode(value);
+        res.write(chunk);
+      }
     } else {
       const chat = await response.json();
       const openaiResponse = {
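The streaming branch added here is a small SSE pass-through: it sets the standard SSE headers, pipes the upstream body through unchanged, and closes with an OpenAI-style `data: [DONE]` sentinel. Condensed into one function for reference, assuming an Express-style `res` and a fetch `Response` for the upstream; the function name and anything not visible in the hunk are assumptions:

```typescript
import type { Response as ExpressResponse } from 'express';

// Condensed sketch of the streaming branch: forward upstream SSE chunks verbatim,
// then signal end-of-stream with the OpenAI-style "data: [DONE]" sentinel.
async function forwardStream(upstream: Response, res: ExpressResponse): Promise<void> {
  // Standard SSE response headers
  res.setHeader('Content-Type', 'text/event-stream');
  res.setHeader('Cache-Control', 'no-cache');
  res.setHeader('Connection', 'keep-alive');

  const reader = upstream.body?.getReader();
  if (!reader) {
    res.status(500).json({ error: 'Failed to get stream reader' });
    return;
  }

  const decoder = new TextDecoder();

  while (true) {
    const { done, value } = await reader.read();
    if (done) {
      res.write('data: [DONE]\n\n');
      res.end();
      break;
    }
    // Pass upstream SSE chunks through unchanged
    res.write(decoder.decode(value));
  }
}
```

Forwarding chunks verbatim keeps the proxy transparent to existing OpenAI SSE clients, which typically stop reading on the `[DONE]` sentinel.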

fern/chat/streaming.mdx

Lines changed: 9 additions & 5 deletions
@@ -106,7 +106,7 @@ We'll enhance the TechFlow support chat from the quickstart to provide real-time
         const data = JSON.parse(line.slice(6));
         if (data.path && data.delta) {
           fullResponse += data.delta;
-          process.stdout.write(data.delta); // Print each token
+          process.stdout.write(data.delta);
         }
       }
     }

@@ -158,6 +158,7 @@ We'll enhance the TechFlow support chat from the quickstart to provide real-time

   const decoder = new TextDecoder();
   let fullContent = '';
+  let currentChatId: string | undefined;

   while (true) {
     const { done, value } = await reader.read();

@@ -169,6 +170,11 @@ We'll enhance the TechFlow support chat from the quickstart to provide real-time
     for (const line of lines) {
       if (line.startsWith('data: ')) {
         const event = JSON.parse(line.slice(6));
+
+        if (event.id && !currentChatId) {
+          currentChatId = event.id;
+        }
+
         if (event.path && event.delta) {
           fullContent += event.delta;
           process.stdout.write(event.delta);

@@ -177,10 +183,8 @@ We'll enhance the TechFlow support chat from the quickstart to provide real-time
     }
   }

-  // Extract chatId from first message for context
-  if (!lastChatId) {
-    // You'd get this from a non-streaming call or API response
-    // For simplicity, we'll assume you track it separately
+  if (currentChatId) {
+    lastChatId = currentChatId;
   }

   return fullContent;
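Taken together, the streaming.mdx changes replace the placeholder comments with real context tracking: the chat id is captured from the first SSE event and saved to `lastChatId` once the stream finishes. A condensed sketch of the resulting handler, assuming the `{ id, path, delta }` event shape shown in the diff; the function name and surrounding setup are illustrative:

```typescript
// Condensed sketch of the updated streaming handler: accumulate deltas and capture
// the chat id from the first event so the next turn can continue the conversation.
// Assumes each "data:" line is complete, well-formed JSON, as in the docs example.
let lastChatId: string | undefined;

async function readStream(body: ReadableStream<Uint8Array>): Promise<string> {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let fullContent = '';
  let currentChatId: string | undefined;

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;

    const lines = decoder.decode(value).split('\n').filter(line => line.trim());
    for (const line of lines) {
      if (!line.startsWith('data: ')) continue;
      const event = JSON.parse(line.slice(6));

      // The first event carries the chat id; keep it for the next turn
      if (event.id && !currentChatId) {
        currentChatId = event.id;
      }
      if (event.path && event.delta) {
        fullContent += event.delta;
        process.stdout.write(event.delta);
      }
    }
  }

  if (currentChatId) {
    lastChatId = currentChatId;
  }
  return fullContent;
}
```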
