
Commit 4c42d7c

feat(api): update via SDK Studio
1 parent: e809702


13 files changed (+925 −6 lines)


.stats.yml

Lines changed: 4 additions & 4 deletions
@@ -1,4 +1,4 @@
-configured_endpoints: 5
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/kernel%2Fkernel-3b19f5e2b96ede3193aa7a24d3f82d1406b8a16ea25e98ba3956e4a1a2376ad7.yml
-openapi_spec_hash: b62a6e73ddcec71674973f795a5790ac
-config_hash: df889df131f7438197abd59faace3c77
+configured_endpoints: 6
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/kernel%2Fkernel-19b0d17ba368f32827ee322d15a7f4ff7e1f3bbf66606fad227b3465f8ffc5ab.yml
+openapi_spec_hash: 4a3cb766898e8a134ef99fe6c4c87736
+config_hash: 9018b7ff17f8de1bc3e99a0ae2f2df68

api.md

Lines changed: 2 additions & 0 deletions
@@ -5,10 +5,12 @@
 Types:

 - <code><a href="./src/resources/apps/deployments.ts">DeploymentCreateResponse</a></code>
+- <code><a href="./src/resources/apps/deployments.ts">DeploymentFollowResponse</a></code>

 Methods:

 - <code title="post /deploy">client.apps.deployments.<a href="./src/resources/apps/deployments.ts">create</a>({ ...params }) -> DeploymentCreateResponse</code>
+- <code title="get /apps/{id}/events">client.apps.deployments.<a href="./src/resources/apps/deployments.ts">follow</a>(id) -> DeploymentFollowResponse</code>

 ## Invocations

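For context, a minimal sketch of how the new `follow` method might be called. The package name `@onkernel/sdk`, the client construction, the placeholder app id, and the assumption that the response can be iterated as a stream of events (consistent with the `streaming.ts` addition below) are illustrative only and not confirmed by this diff:

```ts
import Kernel from '@onkernel/sdk'; // assumed package name

const client = new Kernel(); // assumption: API key is read from the environment

async function main() {
  // `follow` maps to `get /apps/{id}/events`; 'app_123' is a placeholder id.
  const events = await client.apps.deployments.follow('app_123');

  // Assumption: the response streams deployment events that can be async-iterated.
  for await (const event of events as AsyncIterable<unknown>) {
    console.log(event);
  }
}

main().catch(console.error);
```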
src/core/streaming.ts

Lines changed: 301 additions & 0 deletions
import { KernelError } from './error';
import { type ReadableStream } from '../internal/shim-types';
import { makeReadableStream } from '../internal/shims';
import { findDoubleNewlineIndex, LineDecoder } from '../internal/decoders/line';
import { ReadableStreamToAsyncIterable } from '../internal/shims';
import { isAbortError } from '../internal/errors';
import { encodeUTF8 } from '../internal/utils/bytes';

type Bytes = string | ArrayBuffer | Uint8Array | null | undefined;

export type ServerSentEvent = {
  event: string | null;
  data: string;
  raw: string[];
};

export class Stream<Item> implements AsyncIterable<Item> {
  controller: AbortController;

  constructor(
    private iterator: () => AsyncIterator<Item>,
    controller: AbortController,
  ) {
    this.controller = controller;
  }

  static fromSSEResponse<Item>(response: Response, controller: AbortController): Stream<Item> {
    let consumed = false;

    async function* iterator(): AsyncIterator<Item, any, undefined> {
      if (consumed) {
        throw new KernelError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
      }
      consumed = true;
      let done = false;
      try {
        for await (const sse of _iterSSEMessages(response, controller)) {
          try {
            yield JSON.parse(sse.data);
          } catch (e) {
            console.error(`Could not parse message into JSON:`, sse.data);
            console.error(`From chunk:`, sse.raw);
            throw e;
          }
        }
        done = true;
      } catch (e) {
        // If the user calls `stream.controller.abort()`, we should exit without throwing.
        if (isAbortError(e)) return;
        throw e;
      } finally {
        // If the user `break`s, abort the ongoing request.
        if (!done) controller.abort();
      }
    }

    return new Stream(iterator, controller);
  }

  /**
   * Generates a Stream from a newline-separated ReadableStream
   * where each item is a JSON value.
   */
  static fromReadableStream<Item>(readableStream: ReadableStream, controller: AbortController): Stream<Item> {
    let consumed = false;

    async function* iterLines(): AsyncGenerator<string, void, unknown> {
      const lineDecoder = new LineDecoder();

      const iter = ReadableStreamToAsyncIterable<Bytes>(readableStream);
      for await (const chunk of iter) {
        for (const line of lineDecoder.decode(chunk)) {
          yield line;
        }
      }

      for (const line of lineDecoder.flush()) {
        yield line;
      }
    }

    async function* iterator(): AsyncIterator<Item, any, undefined> {
      if (consumed) {
        throw new KernelError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
      }
      consumed = true;
      let done = false;
      try {
        for await (const line of iterLines()) {
          if (done) continue;
          if (line) yield JSON.parse(line);
        }
        done = true;
      } catch (e) {
        // If the user calls `stream.controller.abort()`, we should exit without throwing.
        if (isAbortError(e)) return;
        throw e;
      } finally {
        // If the user `break`s, abort the ongoing request.
        if (!done) controller.abort();
      }
    }

    return new Stream(iterator, controller);
  }

  [Symbol.asyncIterator](): AsyncIterator<Item> {
    return this.iterator();
  }

  /**
   * Splits the stream into two streams which can be
   * independently read from at different speeds.
   */
  tee(): [Stream<Item>, Stream<Item>] {
    const left: Array<Promise<IteratorResult<Item>>> = [];
    const right: Array<Promise<IteratorResult<Item>>> = [];
    const iterator = this.iterator();

    const teeIterator = (queue: Array<Promise<IteratorResult<Item>>>): AsyncIterator<Item> => {
      return {
        next: () => {
          if (queue.length === 0) {
            const result = iterator.next();
            left.push(result);
            right.push(result);
          }
          return queue.shift()!;
        },
      };
    };

    return [
      new Stream(() => teeIterator(left), this.controller),
      new Stream(() => teeIterator(right), this.controller),
    ];
  }

  /**
   * Converts this stream to a newline-separated ReadableStream of
   * JSON stringified values in the stream
   * which can be turned back into a Stream with `Stream.fromReadableStream()`.
   */
  toReadableStream(): ReadableStream {
    const self = this;
    let iter: AsyncIterator<Item>;

    return makeReadableStream({
      async start() {
        iter = self[Symbol.asyncIterator]();
      },
      async pull(ctrl: any) {
        try {
          const { value, done } = await iter.next();
          if (done) return ctrl.close();

          const bytes = encodeUTF8(JSON.stringify(value) + '\n');

          ctrl.enqueue(bytes);
        } catch (err) {
          ctrl.error(err);
        }
      },
      async cancel() {
        await iter.return?.();
      },
    });
  }
}

export async function* _iterSSEMessages(
  response: Response,
  controller: AbortController,
): AsyncGenerator<ServerSentEvent, void, unknown> {
  if (!response.body) {
    controller.abort();
    if (
      typeof (globalThis as any).navigator !== 'undefined' &&
      (globalThis as any).navigator.product === 'ReactNative'
    ) {
      throw new KernelError(
        `The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`,
      );
    }
    throw new KernelError(`Attempted to iterate over a response with no body`);
  }

  const sseDecoder = new SSEDecoder();
  const lineDecoder = new LineDecoder();

  const iter = ReadableStreamToAsyncIterable<Bytes>(response.body);
  for await (const sseChunk of iterSSEChunks(iter)) {
    for (const line of lineDecoder.decode(sseChunk)) {
      const sse = sseDecoder.decode(line);
      if (sse) yield sse;
    }
  }

  for (const line of lineDecoder.flush()) {
    const sse = sseDecoder.decode(line);
    if (sse) yield sse;
  }
}

/**
 * Given an async iterable iterator, iterates over it and yields full
 * SSE chunks, i.e. yields when a double new-line is encountered.
 */
async function* iterSSEChunks(iterator: AsyncIterableIterator<Bytes>): AsyncGenerator<Uint8Array> {
  let data = new Uint8Array();

  for await (const chunk of iterator) {
    if (chunk == null) {
      continue;
    }

    const binaryChunk =
      chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
      : typeof chunk === 'string' ? encodeUTF8(chunk)
      : chunk;

    let newData = new Uint8Array(data.length + binaryChunk.length);
    newData.set(data);
    newData.set(binaryChunk, data.length);
    data = newData;

    let patternIndex;
    while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) {
      yield data.slice(0, patternIndex);
      data = data.slice(patternIndex);
    }
  }

  if (data.length > 0) {
    yield data;
  }
}

class SSEDecoder {
  private data: string[];
  private event: string | null;
  private chunks: string[];

  constructor() {
    this.event = null;
    this.data = [];
    this.chunks = [];
  }

  decode(line: string) {
    if (line.endsWith('\r')) {
      line = line.substring(0, line.length - 1);
    }

    if (!line) {
      // empty line and we didn't previously encounter any messages
      if (!this.event && !this.data.length) return null;

      const sse: ServerSentEvent = {
        event: this.event,
        data: this.data.join('\n'),
        raw: this.chunks,
      };

      this.event = null;
      this.data = [];
      this.chunks = [];

      return sse;
    }

    this.chunks.push(line);

    if (line.startsWith(':')) {
      return null;
    }

    let [fieldname, _, value] = partition(line, ':');

    if (value.startsWith(' ')) {
      value = value.substring(1);
    }

    if (fieldname === 'event') {
      this.event = value;
    } else if (fieldname === 'data') {
      this.data.push(value);
    }

    return null;
  }
}

function partition(str: string, delimiter: string): [string, string, string] {
  const index = str.indexOf(delimiter);
  if (index !== -1) {
    return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)];
  }

  return [str, '', ''];
}

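A small sketch of how the `Stream` helper above might be consumed directly. The placeholder URL and the hand-rolled `fetch` call are illustrative; in practice the class is presumably wired up by the generated client rather than used standalone:

```ts
import { Stream } from './src/core/streaming';

async function demo() {
  const controller = new AbortController();

  // Placeholder endpoint; anything that responds with `text/event-stream` works here.
  const response = await fetch('https://example.com/events', { signal: controller.signal });

  // Each SSE `data:` payload is JSON.parse'd and yielded as one item.
  const stream = Stream.fromSSEResponse<{ message: string }>(response, controller);

  for await (const event of stream) {
    console.log('received:', event.message);
  }
}

demo().catch(console.error);
```

Breaking out of the `for await` loop aborts the underlying request via the shared `AbortController`, and `.tee()` can be used first if two consumers need to read the same stream independently.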
0 commit comments
