diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index a0ad416..cdc55cc 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.1.0-alpha.12"
+ ".": "0.1.0-alpha.13"
}
diff --git a/.stats.yml b/.stats.yml
index 4dfbf42..01c41ad 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 5
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/kernel%2Fkernel-1fe396b957ced73281fc0a61a69b630836aa5c89a8dccce2c5a1716bc9775e80.yml
-openapi_spec_hash: 9a0d67fb0781be034b77839584109638
-config_hash: df889df131f7438197abd59faace3c77
+configured_endpoints: 7
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/kernel%2Fkernel-c9d64df733f286f09d2203f4e3d820ce57e8d4c629c5e2db4e2bfac91fbc1598.yml
+openapi_spec_hash: fa407611fc566d55f403864fbfaa6c23
+config_hash: 7f67c5b95af1e4b39525515240b72275
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9c63519..c4d0dc7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,14 @@
# Changelog
+## 0.1.0-alpha.13 (2025-05-19)
+
+Full Changelog: [v0.1.0-alpha.12...v0.1.0-alpha.13](https://github.com/onkernel/kernel-node-sdk/compare/v0.1.0-alpha.12...v0.1.0-alpha.13)
+
+### Features
+
+* **api:** update via SDK Studio ([bba7f08](https://github.com/onkernel/kernel-node-sdk/commit/bba7f08386eb92c2fcc5087a414e6d29f2ece821))
+* **api:** update via SDK Studio ([4c42d7c](https://github.com/onkernel/kernel-node-sdk/commit/4c42d7cdd5c0d25d48b2c5ea4bb1db9af009b279))
+
## 0.1.0-alpha.12 (2025-05-19)
Full Changelog: [v0.1.0-alpha.11...v0.1.0-alpha.12](https://github.com/onkernel/kernel-node-sdk/compare/v0.1.0-alpha.11...v0.1.0-alpha.12)
diff --git a/api.md b/api.md
index 1fc97fd..93c2c19 100644
--- a/api.md
+++ b/api.md
@@ -1,14 +1,24 @@
# Apps
+Types:
+
+- AppListResponse
+
+Methods:
+
+- client.apps.list({ ...params }) -> AppListResponse
+
## Deployments
Types:
- DeploymentCreateResponse
+- DeploymentFollowResponse
Methods:
- client.apps.deployments.create({ ...params }) -> DeploymentCreateResponse
+- client.apps.deployments.follow(id) -> DeploymentFollowResponse
## Invocations
diff --git a/package.json b/package.json
index 8534ef8..659707c 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@onkernel/sdk",
- "version": "0.1.0-alpha.12",
+ "version": "0.1.0-alpha.13",
"description": "The official TypeScript library for the Kernel API",
"author": "Kernel <>",
"types": "dist/index.d.ts",
diff --git a/src/client.ts b/src/client.ts
index 6c25324..d3725a3 100644
--- a/src/client.ts
+++ b/src/client.ts
@@ -30,7 +30,7 @@ import { readEnv } from './internal/utils/env';
import { formatRequestDetails, loggerFor } from './internal/utils/log';
import { isEmptyObj } from './internal/utils/values';
import { KernelApp } from './core/app-framework';
-import { Apps } from './resources/apps/apps';
+import { AppListParams, AppListResponse, Apps } from './resources/apps/apps';
const environments = {
production: 'https://api.onkernel.com/',
@@ -740,7 +740,7 @@ Kernel.Browsers = Browsers;
export declare namespace Kernel {
export type RequestOptions = Opts.RequestOptions;
- export { Apps as Apps };
+ export { Apps as Apps, type AppListResponse as AppListResponse, type AppListParams as AppListParams };
export {
Browsers as Browsers,
diff --git a/src/core/app-framework.ts b/src/core/app-framework.ts
index 97f5402..8099d9b 100644
--- a/src/core/app-framework.ts
+++ b/src/core/app-framework.ts
@@ -1,5 +1,5 @@
export interface KernelContext {
- invocationId: string;
+ invocation_id: string;
}
export interface KernelAction {
diff --git a/src/core/streaming.ts b/src/core/streaming.ts
new file mode 100644
index 0000000..7191ee4
--- /dev/null
+++ b/src/core/streaming.ts
@@ -0,0 +1,301 @@
+import { KernelError } from './error';
+import { type ReadableStream } from '../internal/shim-types';
+import { makeReadableStream } from '../internal/shims';
+import { findDoubleNewlineIndex, LineDecoder } from '../internal/decoders/line';
+import { ReadableStreamToAsyncIterable } from '../internal/shims';
+import { isAbortError } from '../internal/errors';
+import { encodeUTF8 } from '../internal/utils/bytes';
+
+type Bytes = string | ArrayBuffer | Uint8Array | null | undefined;
+
+export type ServerSentEvent = {
+ event: string | null;
+ data: string;
+ raw: string[];
+};
+
+export class Stream<Item> implements AsyncIterable<Item> {
+ controller: AbortController;
+
+ constructor(
+ private iterator: () => AsyncIterator<Item>,
+ controller: AbortController,
+ ) {
+ this.controller = controller;
+ }
+
+ static fromSSEResponse<Item>(response: Response, controller: AbortController): Stream<Item> {
+ let consumed = false;
+
+ async function* iterator(): AsyncIterator<Item, any, undefined> {
+ if (consumed) {
+ throw new KernelError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
+ }
+ consumed = true;
+ let done = false;
+ try {
+ for await (const sse of _iterSSEMessages(response, controller)) {
+ try {
+ yield JSON.parse(sse.data);
+ } catch (e) {
+ console.error(`Could not parse message into JSON:`, sse.data);
+ console.error(`From chunk:`, sse.raw);
+ throw e;
+ }
+ }
+ done = true;
+ } catch (e) {
+ // If the user calls `stream.controller.abort()`, we should exit without throwing.
+ if (isAbortError(e)) return;
+ throw e;
+ } finally {
+ // If the user `break`s, abort the ongoing request.
+ if (!done) controller.abort();
+ }
+ }
+
+ return new Stream(iterator, controller);
+ }
+
+ /**
+ * Generates a Stream from a newline-separated ReadableStream
+ * where each item is a JSON value.
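+ *
+ * @example
+ * ```ts
+ * // usage sketch (hypothetical item shape), given some ReadableStream `readable`;
+ * // pairs with `toReadableStream()` below
+ * const stream = Stream.fromReadableStream<{ message: string }>(readable, new AbortController());
+ * for await (const item of stream) {
+ * console.log(item.message);
+ * }
+ * ```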
+ */
+ static fromReadableStream<Item>(readableStream: ReadableStream, controller: AbortController): Stream<Item> {
+ let consumed = false;
+
+ async function* iterLines(): AsyncGenerator<string, void, unknown> {
+ const lineDecoder = new LineDecoder();
+
+ const iter = ReadableStreamToAsyncIterable(readableStream);
+ for await (const chunk of iter) {
+ for (const line of lineDecoder.decode(chunk)) {
+ yield line;
+ }
+ }
+
+ for (const line of lineDecoder.flush()) {
+ yield line;
+ }
+ }
+
+ async function* iterator(): AsyncIterator<Item, any, undefined> {
+ if (consumed) {
+ throw new KernelError('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');
+ }
+ consumed = true;
+ let done = false;
+ try {
+ for await (const line of iterLines()) {
+ if (done) continue;
+ if (line) yield JSON.parse(line);
+ }
+ done = true;
+ } catch (e) {
+ // If the user calls `stream.controller.abort()`, we should exit without throwing.
+ if (isAbortError(e)) return;
+ throw e;
+ } finally {
+ // If the user `break`s, abort the ongoing request.
+ if (!done) controller.abort();
+ }
+ }
+
+ return new Stream(iterator, controller);
+ }
+
+ [Symbol.asyncIterator](): AsyncIterator<Item> {
+ return this.iterator();
+ }
+
+ /**
+ * Splits the stream into two streams which can be
+ * independently read from at different speeds.
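+ *
+ * @example
+ * ```ts
+ * // usage sketch: consume the same stream twice (names are illustrative)
+ * const [left, right] = stream.tee();
+ * for await (const event of left) {
+ * // ...read `left` independently of `right`
+ * }
+ * ```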
+ */
+ tee(): [Stream<Item>, Stream<Item>] {
+ const left: Array<Promise<IteratorResult<Item>>> = [];
+ const right: Array<Promise<IteratorResult<Item>>> = [];
+ const iterator = this.iterator();
+
+ const teeIterator = (queue: Array<Promise<IteratorResult<Item>>>): AsyncIterator<Item> => {
+ return {
+ next: () => {
+ if (queue.length === 0) {
+ const result = iterator.next();
+ left.push(result);
+ right.push(result);
+ }
+ return queue.shift()!;
+ },
+ };
+ };
+
+ return [
+ new Stream(() => teeIterator(left), this.controller),
+ new Stream(() => teeIterator(right), this.controller),
+ ];
+ }
+
+ /**
+ * Converts this stream to a newline-separated ReadableStream of
+ * JSON stringified values in the stream
+ * which can be turned back into a Stream with `Stream.fromReadableStream()`.
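+ *
+ * @example
+ * ```ts
+ * // round-trip sketch (illustrative): serialize to bytes, then rebuild a Stream
+ * const readable = stream.toReadableStream();
+ * const copy = Stream.fromReadableStream(readable, new AbortController());
+ * ```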
+ */
+ toReadableStream(): ReadableStream {
+ const self = this;
+ let iter: AsyncIterator<Item>;
+
+ return makeReadableStream({
+ async start() {
+ iter = self[Symbol.asyncIterator]();
+ },
+ async pull(ctrl: any) {
+ try {
+ const { value, done } = await iter.next();
+ if (done) return ctrl.close();
+
+ const bytes = encodeUTF8(JSON.stringify(value) + '\n');
+
+ ctrl.enqueue(bytes);
+ } catch (err) {
+ ctrl.error(err);
+ }
+ },
+ async cancel() {
+ await iter.return?.();
+ },
+ });
+ }
+}
+
+export async function* _iterSSEMessages(
+ response: Response,
+ controller: AbortController,
+): AsyncGenerator<ServerSentEvent, void, unknown> {
+ if (!response.body) {
+ controller.abort();
+ if (
+ typeof (globalThis as any).navigator !== 'undefined' &&
+ (globalThis as any).navigator.product === 'ReactNative'
+ ) {
+ throw new KernelError(
+ `The default react-native fetch implementation does not support streaming. Please use expo/fetch: https://docs.expo.dev/versions/latest/sdk/expo/#expofetch-api`,
+ );
+ }
+ throw new KernelError(`Attempted to iterate over a response with no body`);
+ }
+
+ const sseDecoder = new SSEDecoder();
+ const lineDecoder = new LineDecoder();
+
+ const iter = ReadableStreamToAsyncIterable(response.body);
+ for await (const sseChunk of iterSSEChunks(iter)) {
+ for (const line of lineDecoder.decode(sseChunk)) {
+ const sse = sseDecoder.decode(line);
+ if (sse) yield sse;
+ }
+ }
+
+ for (const line of lineDecoder.flush()) {
+ const sse = sseDecoder.decode(line);
+ if (sse) yield sse;
+ }
+}
+
+/**
+ * Given an async iterable iterator, iterates over it and yields full
+ * SSE chunks, i.e. yields when a double new-line is encountered.
+ */
+async function* iterSSEChunks(iterator: AsyncIterableIterator<Bytes>): AsyncGenerator<Uint8Array> {
+ let data = new Uint8Array();
+
+ for await (const chunk of iterator) {
+ if (chunk == null) {
+ continue;
+ }
+
+ const binaryChunk =
+ chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
+ : typeof chunk === 'string' ? encodeUTF8(chunk)
+ : chunk;
+
+ let newData = new Uint8Array(data.length + binaryChunk.length);
+ newData.set(data);
+ newData.set(binaryChunk, data.length);
+ data = newData;
+
+ let patternIndex;
+ while ((patternIndex = findDoubleNewlineIndex(data)) !== -1) {
+ yield data.slice(0, patternIndex);
+ data = data.slice(patternIndex);
+ }
+ }
+
+ if (data.length > 0) {
+ yield data;
+ }
+}
+
+class SSEDecoder {
+ private data: string[];
+ private event: string | null;
+ private chunks: string[];
+
+ constructor() {
+ this.event = null;
+ this.data = [];
+ this.chunks = [];
+ }
+
+ decode(line: string) {
+ if (line.endsWith('\r')) {
+ line = line.substring(0, line.length - 1);
+ }
+
+ if (!line) {
+ // empty line and we didn't previously encounter any messages
+ if (!this.event && !this.data.length) return null;
+
+ const sse: ServerSentEvent = {
+ event: this.event,
+ data: this.data.join('\n'),
+ raw: this.chunks,
+ };
+
+ this.event = null;
+ this.data = [];
+ this.chunks = [];
+
+ return sse;
+ }
+
+ this.chunks.push(line);
+
+ if (line.startsWith(':')) {
+ return null;
+ }
+
+ let [fieldname, _, value] = partition(line, ':');
+
+ if (value.startsWith(' ')) {
+ value = value.substring(1);
+ }
+
+ if (fieldname === 'event') {
+ this.event = value;
+ } else if (fieldname === 'data') {
+ this.data.push(value);
+ }
+
+ return null;
+ }
+}
+
+function partition(str: string, delimiter: string): [string, string, string] {
+ const index = str.indexOf(delimiter);
+ if (index !== -1) {
+ return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)];
+ }
+
+ return [str, '', ''];
+}
diff --git a/src/internal/decoders/line.ts b/src/internal/decoders/line.ts
new file mode 100644
index 0000000..b3bfa97
--- /dev/null
+++ b/src/internal/decoders/line.ts
@@ -0,0 +1,135 @@
+import { concatBytes, decodeUTF8, encodeUTF8 } from '../utils/bytes';
+
+export type Bytes = string | ArrayBuffer | Uint8Array | null | undefined;
+
+/**
+ * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally
+ * reading lines from text.
+ *
+ * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258
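+ *
+ * @example
+ * ```ts
+ * // usage sketch: feed chunks incrementally, then flush (expected results shown as comments)
+ * const decoder = new LineDecoder();
+ * decoder.decode('foo'); // []
+ * decoder.decode(' bar\nbaz'); // ['foo bar']
+ * decoder.flush(); // ['baz']
+ * ```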
+ */
+export class LineDecoder {
+ // prettier-ignore
+ static NEWLINE_CHARS = new Set(['\n', '\r']);
+ static NEWLINE_REGEXP = /\r\n|[\n\r]/g;
+
+ #buffer: Uint8Array;
+ #carriageReturnIndex: number | null;
+
+ constructor() {
+ this.#buffer = new Uint8Array();
+ this.#carriageReturnIndex = null;
+ }
+
+ decode(chunk: Bytes): string[] {
+ if (chunk == null) {
+ return [];
+ }
+
+ const binaryChunk =
+ chunk instanceof ArrayBuffer ? new Uint8Array(chunk)
+ : typeof chunk === 'string' ? encodeUTF8(chunk)
+ : chunk;
+
+ this.#buffer = concatBytes([this.#buffer, binaryChunk]);
+
+ const lines: string[] = [];
+ let patternIndex;
+ while ((patternIndex = findNewlineIndex(this.#buffer, this.#carriageReturnIndex)) != null) {
+ if (patternIndex.carriage && this.#carriageReturnIndex == null) {
+ // skip until we either get a corresponding `\n`, a new `\r` or nothing
+ this.#carriageReturnIndex = patternIndex.index;
+ continue;
+ }
+
+ // we got double \r or \rtext\n
+ if (
+ this.#carriageReturnIndex != null &&
+ (patternIndex.index !== this.#carriageReturnIndex + 1 || patternIndex.carriage)
+ ) {
+ lines.push(decodeUTF8(this.#buffer.subarray(0, this.#carriageReturnIndex - 1)));
+ this.#buffer = this.#buffer.subarray(this.#carriageReturnIndex);
+ this.#carriageReturnIndex = null;
+ continue;
+ }
+
+ const endIndex =
+ this.#carriageReturnIndex !== null ? patternIndex.preceding - 1 : patternIndex.preceding;
+
+ const line = decodeUTF8(this.#buffer.subarray(0, endIndex));
+ lines.push(line);
+
+ this.#buffer = this.#buffer.subarray(patternIndex.index);
+ this.#carriageReturnIndex = null;
+ }
+
+ return lines;
+ }
+
+ flush(): string[] {
+ if (!this.#buffer.length) {
+ return [];
+ }
+ return this.decode('\n');
+ }
+}
+
+/**
+ * This function searches the buffer for the end patterns, (\r or \n)
+ * and returns an object with the index preceding the matched newline and the
+ * index after the newline char. `null` is returned if no new line is found.
+ *
+ * ```ts
+ * findNewlineIndex(new TextEncoder().encode('abc\ndef'), null) -> { preceding: 3, index: 4, carriage: false }
+ * ```
+ */
+function findNewlineIndex(
+ buffer: Uint8Array,
+ startIndex: number | null,
+): { preceding: number; index: number; carriage: boolean } | null {
+ const newline = 0x0a; // \n
+ const carriage = 0x0d; // \r
+
+ for (let i = startIndex ?? 0; i < buffer.length; i++) {
+ if (buffer[i] === newline) {
+ return { preceding: i, index: i + 1, carriage: false };
+ }
+
+ if (buffer[i] === carriage) {
+ return { preceding: i, index: i + 1, carriage: true };
+ }
+ }
+
+ return null;
+}
+
+export function findDoubleNewlineIndex(buffer: Uint8Array): number {
+ // This function searches the buffer for the end patterns (\r\r, \n\n, \r\n\r\n)
+ // and returns the index right after the first occurrence of any pattern,
+ // or -1 if none of the patterns are found.
+ const newline = 0x0a; // \n
+ const carriage = 0x0d; // \r
+
+ for (let i = 0; i < buffer.length - 1; i++) {
+ if (buffer[i] === newline && buffer[i + 1] === newline) {
+ // \n\n
+ return i + 2;
+ }
+ if (buffer[i] === carriage && buffer[i + 1] === carriage) {
+ // \r\r
+ return i + 2;
+ }
+ if (
+ buffer[i] === carriage &&
+ buffer[i + 1] === newline &&
+ i + 3 < buffer.length &&
+ buffer[i + 2] === carriage &&
+ buffer[i + 3] === newline
+ ) {
+ // \r\n\r\n
+ return i + 4;
+ }
+ }
+
+ return -1;
+}
diff --git a/src/internal/parse.ts b/src/internal/parse.ts
index 53d4b80..18d0b41 100644
--- a/src/internal/parse.ts
+++ b/src/internal/parse.ts
@@ -1,6 +1,7 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import type { FinalRequestOptions } from './request-options';
+import { Stream } from '../core/streaming';
import { type Kernel } from '../client';
import { formatRequestDetails, loggerFor } from './utils/log';
@@ -16,6 +17,19 @@ export type APIResponseProps = {
export async function defaultParseResponse<T>(client: Kernel, props: APIResponseProps): Promise<T> {
const { response, requestLogID, retryOfRequestLogID, startTime } = props;
const body = await (async () => {
+ if (props.options.stream) {
+ loggerFor(client).debug('response', response.status, response.url, response.headers, response.body);
+
+ // Note: there is an invariant here that isn't represented in the type system
+ // that if you set `stream: true` the response type must also be `Stream`
+
+ if (props.options.__streamClass) {
+ return props.options.__streamClass.fromSSEResponse(response, props.controller) as any;
+ }
+
+ return Stream.fromSSEResponse(response, props.controller) as any;
+ }
+
// fetch refuses to read the body when the status code is 204.
if (response.status === 204) {
return null as T;
diff --git a/src/internal/request-options.ts b/src/internal/request-options.ts
index d2ade9e..ef09931 100644
--- a/src/internal/request-options.ts
+++ b/src/internal/request-options.ts
@@ -3,6 +3,7 @@
import { NullableHeaders } from './headers';
import type { BodyInit } from './builtin-types';
+import { Stream } from '../core/streaming';
import type { HTTPMethod, MergedRequestInit } from './types';
import { type HeadersLike } from './headers';
@@ -22,6 +23,7 @@ export type RequestOptions = {
idempotencyKey?: string;
__binaryResponse?: boolean | undefined;
+ __streamClass?: typeof Stream;
};
export type EncodedContent = { bodyHeaders: HeadersLike; body: BodyInit };
diff --git a/src/resources/apps/apps.ts b/src/resources/apps/apps.ts
index fe32c1e..7db2293 100644
--- a/src/resources/apps/apps.ts
+++ b/src/resources/apps/apps.ts
@@ -2,7 +2,12 @@
import { APIResource } from '../../core/resource';
import * as DeploymentsAPI from './deployments';
-import { DeploymentCreateParams, DeploymentCreateResponse, Deployments } from './deployments';
+import {
+ DeploymentCreateParams,
+ DeploymentCreateResponse,
+ DeploymentFollowResponse,
+ Deployments,
+} from './deployments';
import * as InvocationsAPI from './invocations';
import {
InvocationCreateParams,
@@ -10,19 +15,83 @@ import {
InvocationRetrieveResponse,
Invocations,
} from './invocations';
+import { APIPromise } from '../../core/api-promise';
+import { RequestOptions } from '../../internal/request-options';
export class Apps extends APIResource {
deployments: DeploymentsAPI.Deployments = new DeploymentsAPI.Deployments(this._client);
invocations: InvocationsAPI.Invocations = new InvocationsAPI.Invocations(this._client);
+
+ /**
+ * List application versions for the authenticated user. Optionally filter by app
+ * name and/or version label.
+ *
+ * @example
+ * ```ts
+ * const apps = await client.apps.list();
+ * ```
+ */
+ list(query: AppListParams | null | undefined = {}, options?: RequestOptions): APIPromise<AppListResponse> {
+ return this._client.get('/apps', { query, ...options });
+ }
+}
+
+export type AppListResponse = Array<AppListResponse.AppListResponseItem>;
+
+export namespace AppListResponse {
+ /**
+ * Summary of an application version.
+ */
+ export interface AppListResponseItem {
+ /**
+ * Unique identifier for the app version
+ */
+ id: string;
+
+ /**
+ * Name of the application
+ */
+ app_name: string;
+
+ /**
+ * Deployment region code
+ */
+ region: string;
+
+ /**
+ * Version label for the application
+ */
+ version: string;
+
+ /**
+ * Environment variables configured for this app version
+ */
+ env_vars?: Record<string, string>;
+ }
+}
+
+export interface AppListParams {
+ /**
+ * Filter results by application name.
+ */
+ app_name?: string;
+
+ /**
+ * Filter results by version label.
+ */
+ version?: string;
}
Apps.Deployments = Deployments;
Apps.Invocations = Invocations;
export declare namespace Apps {
+ export { type AppListResponse as AppListResponse, type AppListParams as AppListParams };
+
export {
Deployments as Deployments,
type DeploymentCreateResponse as DeploymentCreateResponse,
+ type DeploymentFollowResponse as DeploymentFollowResponse,
type DeploymentCreateParams as DeploymentCreateParams,
};
diff --git a/src/resources/apps/deployments.ts b/src/resources/apps/deployments.ts
index 78a1c4d..e4b7811 100644
--- a/src/resources/apps/deployments.ts
+++ b/src/resources/apps/deployments.ts
@@ -2,9 +2,12 @@
import { APIResource } from '../../core/resource';
import { APIPromise } from '../../core/api-promise';
+import { Stream } from '../../core/streaming';
import { type Uploadable } from '../../core/uploads';
+import { buildHeaders } from '../../internal/headers';
import { RequestOptions } from '../../internal/request-options';
import { multipartFormRequestOptions } from '../../internal/uploads';
+import { path } from '../../internal/utils/path';
export class Deployments extends APIResource {
/**
@@ -21,6 +24,24 @@ export class Deployments extends APIResource {
create(body: DeploymentCreateParams, options?: RequestOptions): APIPromise<DeploymentCreateResponse> {
return this._client.post('/deploy', multipartFormRequestOptions({ body, ...options }, this._client));
}
+
+ /**
+ * Establishes a Server-Sent Events (SSE) stream that delivers real-time logs and
+ * status updates for a deployed application. The stream terminates automatically
+ * once the application reaches a terminal state.
+ *
+ * @example
+ * ```ts
+ * const response = await client.apps.deployments.follow('id');
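+ *
+ * // the promise resolves to a Stream of SSE events; iterating it is a usage sketch
+ * for await (const event of response) {
+ * console.log(event);
+ * }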
+ * ```
+ */
+ follow(id: string, options?: RequestOptions): APIPromise<Stream<DeploymentFollowResponse>> {
+ return this._client.get(path`/apps/${id}/events`, {
+ ...options,
+ headers: buildHeaders([{ Accept: 'text/event-stream' }, options?.headers]),
+ stream: true,
+ }) as APIPromise<Stream<DeploymentFollowResponse>>;
+ }
}
export interface DeploymentCreateResponse {
@@ -68,6 +89,77 @@ export namespace DeploymentCreateResponse {
}
}
+/**
+ * A stream of application events (state updates and logs) in SSE format.
+ */
+export type DeploymentFollowResponse = Array<
+ | DeploymentFollowResponse.StateEvent
+ | DeploymentFollowResponse.StateUpdateEvent
+ | DeploymentFollowResponse.LogEvent
+>;
+
+export namespace DeploymentFollowResponse {
+ /**
+ * Initial state of the application, emitted once when subscribing.
+ */
+ export interface StateEvent {
+ /**
+ * Event type identifier (always "state").
+ */
+ event: 'state';
+
+ /**
+ * Current application state (e.g., "deploying", "running", "succeeded", "failed").
+ */
+ state: string;
+
+ /**
+ * Time the state was reported.
+ */
+ timestamp?: string;
+ }
+
+ /**
+ * An update emitted when the application's state changes.
+ */
+ export interface StateUpdateEvent {
+ /**
+ * Event type identifier (always "state_update").
+ */
+ event: 'state_update';
+
+ /**
+ * New application state (e.g., "running", "succeeded", "failed").
+ */
+ state: string;
+
+ /**
+ * Time the state change occurred.
+ */
+ timestamp?: string;
+ }
+
+ /**
+ * A log entry from the application.
+ */
+ export interface LogEvent {
+ /**
+ * Event type identifier (always "log").
+ */
+ event: 'log';
+
+ /**
+ * Log message text.
+ */
+ message: string;
+
+ /**
+ * Time the log entry was produced.
+ */
+ timestamp?: string;
+ }
+}
+
export interface DeploymentCreateParams {
/**
* Relative path to the entrypoint of the application
@@ -104,6 +196,7 @@ export interface DeploymentCreateParams {
export declare namespace Deployments {
export {
type DeploymentCreateResponse as DeploymentCreateResponse,
+ type DeploymentFollowResponse as DeploymentFollowResponse,
type DeploymentCreateParams as DeploymentCreateParams,
};
}
diff --git a/src/resources/apps/index.ts b/src/resources/apps/index.ts
index 1703ae2..52ecdea 100644
--- a/src/resources/apps/index.ts
+++ b/src/resources/apps/index.ts
@@ -1,7 +1,12 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-export { Apps } from './apps';
-export { Deployments, type DeploymentCreateResponse, type DeploymentCreateParams } from './deployments';
+export { Apps, type AppListResponse, type AppListParams } from './apps';
+export {
+ Deployments,
+ type DeploymentCreateResponse,
+ type DeploymentFollowResponse,
+ type DeploymentCreateParams,
+} from './deployments';
export {
Invocations,
type InvocationCreateResponse,
diff --git a/src/resources/index.ts b/src/resources/index.ts
index d8d49ae..ecbf870 100644
--- a/src/resources/index.ts
+++ b/src/resources/index.ts
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-export { Apps } from './apps/apps';
+export { Apps, type AppListResponse, type AppListParams } from './apps/apps';
export {
Browsers,
type BrowserCreateResponse,
diff --git a/src/streaming.ts b/src/streaming.ts
new file mode 100644
index 0000000..9e6da10
--- /dev/null
+++ b/src/streaming.ts
@@ -0,0 +1,2 @@
+/** @deprecated Import from ./core/streaming instead */
+export * from './core/streaming';
diff --git a/src/version.ts b/src/version.ts
index de5a5a5..97229ce 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const VERSION = '0.1.0-alpha.12'; // x-release-please-version
+export const VERSION = '0.1.0-alpha.13'; // x-release-please-version
diff --git a/tests/api-resources/apps/apps.test.ts b/tests/api-resources/apps/apps.test.ts
new file mode 100644
index 0000000..6bb4e0d
--- /dev/null
+++ b/tests/api-resources/apps/apps.test.ts
@@ -0,0 +1,30 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Kernel from '@onkernel/sdk';
+
+const client = new Kernel({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource apps', () => {
+ // skipped: tests are disabled for the time being
+ test.skip('list', async () => {
+ const responsePromise = client.apps.list();
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ // skipped: tests are disabled for the time being
+ test.skip('list: request options and params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ client.apps.list({ app_name: 'app_name', version: 'version' }, { path: '/_stainless_unknown_path' }),
+ ).rejects.toThrow(Kernel.NotFoundError);
+ });
+});
diff --git a/tests/api-resources/apps/deployments.test.ts b/tests/api-resources/apps/deployments.test.ts
index f576237..7978198 100644
--- a/tests/api-resources/apps/deployments.test.ts
+++ b/tests/api-resources/apps/deployments.test.ts
@@ -34,4 +34,16 @@ describe('resource deployments', () => {
version: '1.0.0',
});
});
+
+ // skipped: currently no good way to test endpoints with content type text/event-stream, Prism mock server will fail
+ test.skip('follow', async () => {
+ const responsePromise = client.apps.deployments.follow('id');
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
});
diff --git a/tests/internal/decoders/line.test.ts b/tests/internal/decoders/line.test.ts
new file mode 100644
index 0000000..1146678
--- /dev/null
+++ b/tests/internal/decoders/line.test.ts
@@ -0,0 +1,128 @@
+import { findDoubleNewlineIndex, LineDecoder } from '@onkernel/sdk/internal/decoders/line';
+
+function decodeChunks(chunks: string[], { flush }: { flush: boolean } = { flush: false }): string[] {
+ const decoder = new LineDecoder();
+ const lines: string[] = [];
+ for (const chunk of chunks) {
+ lines.push(...decoder.decode(chunk));
+ }
+
+ if (flush) {
+ lines.push(...decoder.flush());
+ }
+
+ return lines;
+}
+
+describe('line decoder', () => {
+ test('basic', () => {
+ // baz is not included because the line hasn't ended yet
+ expect(decodeChunks(['foo', ' bar\nbaz'])).toEqual(['foo bar']);
+ });
+
+ test('basic with \\r', () => {
+ expect(decodeChunks(['foo', ' bar\r\nbaz'])).toEqual(['foo bar']);
+ expect(decodeChunks(['foo', ' bar\r\nbaz'], { flush: true })).toEqual(['foo bar', 'baz']);
+ });
+
+ test('trailing new lines', () => {
+ expect(decodeChunks(['foo', ' bar', 'baz\n', 'thing\n'])).toEqual(['foo barbaz', 'thing']);
+ });
+
+ test('trailing new lines with \\r', () => {
+ expect(decodeChunks(['foo', ' bar', 'baz\r\n', 'thing\r\n'])).toEqual(['foo barbaz', 'thing']);
+ });
+
+ test('escaped new lines', () => {
+ expect(decodeChunks(['foo', ' bar\\nbaz\n'])).toEqual(['foo bar\\nbaz']);
+ });
+
+ test('escaped new lines with \\r', () => {
+ expect(decodeChunks(['foo', ' bar\\r\\nbaz\n'])).toEqual(['foo bar\\r\\nbaz']);
+ });
+
+ test('\\r & \\n split across multiple chunks', () => {
+ expect(decodeChunks(['foo\r', '\n', 'bar'], { flush: true })).toEqual(['foo', 'bar']);
+ });
+
+ test('single \\r', () => {
+ expect(decodeChunks(['foo\r', 'bar'], { flush: true })).toEqual(['foo', 'bar']);
+ });
+
+ test('double \\r', () => {
+ expect(decodeChunks(['foo\r', 'bar\r'], { flush: true })).toEqual(['foo', 'bar']);
+ expect(decodeChunks(['foo\r', '\r', 'bar'], { flush: true })).toEqual(['foo', '', 'bar']);
+ // implementation detail that we don't yield the single \r line until a new \r or \n is encountered
+ expect(decodeChunks(['foo\r', '\r', 'bar'], { flush: false })).toEqual(['foo']);
+ });
+
+ test('double \\r then \\r\\n', () => {
+ expect(decodeChunks(['foo\r', '\r', '\r', '\n', 'bar', '\n'])).toEqual(['foo', '', '', 'bar']);
+ expect(decodeChunks(['foo\n', '\n', '\n', 'bar', '\n'])).toEqual(['foo', '', '', 'bar']);
+ });
+
+ test('double newline', () => {
+ expect(decodeChunks(['foo\n\nbar'], { flush: true })).toEqual(['foo', '', 'bar']);
+ expect(decodeChunks(['foo', '\n', '\nbar'], { flush: true })).toEqual(['foo', '', 'bar']);
+ expect(decodeChunks(['foo\n', '\n', 'bar'], { flush: true })).toEqual(['foo', '', 'bar']);
+ expect(decodeChunks(['foo', '\n', '\n', 'bar'], { flush: true })).toEqual(['foo', '', 'bar']);
+ });
+
+ test('multi-byte characters across chunks', () => {
+ const decoder = new LineDecoder();
+
+ // bytes taken from the string 'известни' and arbitrarily split
+ // so that some multi-byte characters span multiple chunks
+ expect(decoder.decode(new Uint8Array([0xd0]))).toHaveLength(0);
+ expect(decoder.decode(new Uint8Array([0xb8, 0xd0, 0xb7, 0xd0]))).toHaveLength(0);
+ expect(
+ decoder.decode(new Uint8Array([0xb2, 0xd0, 0xb5, 0xd1, 0x81, 0xd1, 0x82, 0xd0, 0xbd, 0xd0, 0xb8])),
+ ).toHaveLength(0);
+
+ const decoded = decoder.decode(new Uint8Array([0xa]));
+ expect(decoded).toEqual(['известни']);
+ });
+
+ test('flushing trailing newlines', () => {
+ expect(decodeChunks(['foo\n', '\nbar'], { flush: true })).toEqual(['foo', '', 'bar']);
+ });
+
+ test('flushing empty buffer', () => {
+ expect(decodeChunks([], { flush: true })).toEqual([]);
+ });
+});
+
+describe('findDoubleNewlineIndex', () => {
+ test('finds \\n\\n', () => {
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\n\nbar'))).toBe(5);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('\n\nbar'))).toBe(2);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\n\n'))).toBe(5);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('\n\n'))).toBe(2);
+ });
+
+ test('finds \\r\\r', () => {
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\r\rbar'))).toBe(5);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('\r\rbar'))).toBe(2);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\r\r'))).toBe(5);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('\r\r'))).toBe(2);
+ });
+
+ test('finds \\r\\n\\r\\n', () => {
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\r\n\r\nbar'))).toBe(7);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('\r\n\r\nbar'))).toBe(4);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\r\n\r\n'))).toBe(7);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('\r\n\r\n'))).toBe(4);
+ });
+
+ test('returns -1 when no double newline found', () => {
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\nbar'))).toBe(-1);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\rbar'))).toBe(-1);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\r\nbar'))).toBe(-1);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode(''))).toBe(-1);
+ });
+
+ test('handles incomplete patterns', () => {
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\r\n\r'))).toBe(-1);
+ expect(findDoubleNewlineIndex(new TextEncoder().encode('foo\r\n'))).toBe(-1);
+ });
+});
diff --git a/tests/streaming.test.ts b/tests/streaming.test.ts
new file mode 100644
index 0000000..0124e21
--- /dev/null
+++ b/tests/streaming.test.ts
@@ -0,0 +1,219 @@
+import assert from 'assert';
+import { _iterSSEMessages } from '@onkernel/sdk/core/streaming';
+import { ReadableStreamFrom } from '@onkernel/sdk/internal/shims';
+
+describe('streaming decoding', () => {
+ test('basic', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('event: completion\n');
+ yield Buffer.from('data: {"foo":true}\n');
+ yield Buffer.from('\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(JSON.parse(event.value.data)).toEqual({ foo: true });
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('data without event', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('data: {"foo":true}\n');
+ yield Buffer.from('\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toBeNull();
+ expect(JSON.parse(event.value.data)).toEqual({ foo: true });
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('event without data', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('event: foo\n');
+ yield Buffer.from('\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('foo');
+ expect(event.value.data).toEqual('');
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('multiple events', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('event: foo\n');
+ yield Buffer.from('\n');
+ yield Buffer.from('event: ping\n');
+ yield Buffer.from('\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('foo');
+ expect(event.value.data).toEqual('');
+
+ event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('ping');
+ expect(event.value.data).toEqual('');
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('multiple events with data', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('event: foo\n');
+ yield Buffer.from('data: {"foo":true}\n');
+ yield Buffer.from('\n');
+ yield Buffer.from('event: ping\n');
+ yield Buffer.from('data: {"bar":false}\n');
+ yield Buffer.from('\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('foo');
+ expect(JSON.parse(event.value.data)).toEqual({ foo: true });
+
+ event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('ping');
+ expect(JSON.parse(event.value.data)).toEqual({ bar: false });
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('multiple data lines with empty line', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('event: ping\n');
+ yield Buffer.from('data: {\n');
+ yield Buffer.from('data: "foo":\n');
+ yield Buffer.from('data: \n');
+ yield Buffer.from('data:\n');
+ yield Buffer.from('data: true}\n');
+ yield Buffer.from('\n\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('ping');
+ expect(JSON.parse(event.value.data)).toEqual({ foo: true });
+ expect(event.value.data).toEqual('{\n"foo":\n\n\ntrue}');
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('data json escaped double new line', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('event: ping\n');
+ yield Buffer.from('data: {"foo": "my long\\n\\ncontent"}');
+ yield Buffer.from('\n\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('ping');
+ expect(JSON.parse(event.value.data)).toEqual({ foo: 'my long\n\ncontent' });
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('special new line characters', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('data: {"content": "culpa "}\n');
+ yield Buffer.from('\n');
+ yield Buffer.from('data: {"content": "');
+ yield Buffer.from([0xe2, 0x80, 0xa8]);
+ yield Buffer.from('"}\n');
+ yield Buffer.from('\n');
+ yield Buffer.from('data: {"content": "foo"}\n');
+ yield Buffer.from('\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(JSON.parse(event.value.data)).toEqual({ content: 'culpa ' });
+
+ event = await stream.next();
+ assert(event.value);
+ expect(JSON.parse(event.value.data)).toEqual({ content: Buffer.from([0xe2, 0x80, 0xa8]).toString() });
+
+ event = await stream.next();
+ assert(event.value);
+ expect(JSON.parse(event.value.data)).toEqual({ content: 'foo' });
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+
+ test('multi-byte characters across chunks', async () => {
+ async function* body(): AsyncGenerator<Buffer> {
+ yield Buffer.from('event: completion\n');
+ yield Buffer.from('data: {"content": "');
+ // bytes taken from the string 'известни' and arbitrarily split
+ // so that some multi-byte characters span multiple chunks
+ yield Buffer.from([0xd0]);
+ yield Buffer.from([0xb8, 0xd0, 0xb7, 0xd0]);
+ yield Buffer.from([0xb2, 0xd0, 0xb5, 0xd1, 0x81, 0xd1, 0x82, 0xd0, 0xbd, 0xd0, 0xb8]);
+ yield Buffer.from('"}\n');
+ yield Buffer.from('\n');
+ }
+
+ const stream = _iterSSEMessages(new Response(ReadableStreamFrom(body())), new AbortController())[
+ Symbol.asyncIterator
+ ]();
+
+ let event = await stream.next();
+ assert(event.value);
+ expect(event.value.event).toEqual('completion');
+ expect(JSON.parse(event.value.data)).toEqual({ content: 'известни' });
+
+ event = await stream.next();
+ expect(event.done).toBeTruthy();
+ });
+});