Commit c385203

fixes
1 parent dd6fe0a commit c385203

15 files changed (+130, -163 lines)

.eslintrc.js

Lines changed: 1 addition & 1 deletion

@@ -19,7 +19,7 @@ module.exports = {
   },
   overrides: [
     {
-      files: ['tests/**', 'examples/**'],
+      files: ['tests/**', 'examples/**', 'ecosystem-tests/**'],
       rules: {
         'no-restricted-imports': 'off',
       },

src/api-promise.ts

Lines changed: 1 addition & 1 deletion

@@ -62,7 +62,7 @@ export class APIPromise<T> extends Promise<WithRequestID<T>> {
    * Try setting `"moduleResolution": "NodeNext"` or add `"lib": ["DOM"]`
    * to your `tsconfig.json`.
    */
-  async withResponse(): Promise<{ data: T; response: Response }> {
+  async withResponse(): Promise<{ data: T; response: Response; request_id: string | null }> {
     const [data, response] = await Promise.all([this.parse(), this.asResponse()]);
     return { data, response, request_id: response.headers.get('x-request-id') };
   }
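
For orientation, a minimal consumer-side sketch of the widened return type, assuming a standard OpenAI client; the model name is illustrative and the call can be any method that returns an APIPromise:

  import OpenAI from 'openai';

  const client = new OpenAI();

  async function main() {
    // withResponse() now also surfaces the request ID read from the
    // `x-request-id` response header (null when the header is absent).
    const { data, response, request_id } = await client.chat.completions
      .create({ model: 'gpt-4o-mini', messages: [{ role: 'user', content: 'Hello' }] })
      .withResponse();

    console.log(request_id, response.status, data.choices[0]?.message.content);
  }

  main();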

src/azure.ts

Lines changed: 12 additions & 10 deletions

@@ -124,11 +124,10 @@ export class AzureOpenAI extends OpenAI {
     this._deployment = deployment;
   }

-  override buildRequest(options: FinalRequestOptions<unknown>): {
-    req: RequestInit;
-    url: string;
-    timeout: number;
-  } {
+  override buildRequest(
+    options: FinalRequestOptions,
+    props: { retryCount?: number } = {},
+  ): { req: RequestInit & { headers: Headers }; url: string; timeout: number } {
     if (_deployments_endpoints.has(options.path) && options.method === 'post' && options.body !== undefined) {
       if (!isObj(options.body)) {
         throw new Error('Expected request body to be an object');
@@ -138,7 +137,7 @@ export class AzureOpenAI extends OpenAI {
         options.path = `/deployments/${model}${options.path}`;
       }
     }
-    return super.buildRequest(options);
+    return super.buildRequest(options, props);
   }

   private async _getAzureADToken(): Promise<string | undefined> {
@@ -159,21 +158,23 @@ export class AzureOpenAI extends OpenAI {
   }

   protected override async prepareOptions(opts: FinalRequestOptions): Promise<void> {
+    opts.headers = buildHeaders([opts.headers]);
+
     /**
      * The user should provide a bearer token provider if they want
      * to use Azure AD authentication. The user shouldn't set the
      * Authorization header manually because the header is overwritten
      * with the Azure AD token if a bearer token provider is provided.
      */
-    if (opts.headers?.['api-key']) {
+    if (opts.headers.values.get('Authorization') || opts.headers.values.get('api-key')) {
       return super.prepareOptions(opts);
     }
+
     const token = await this._getAzureADToken();
-    opts.headers ??= {};
     if (token) {
-      opts.headers['Authorization'] = `Bearer ${token}`;
+      opts.headers.values.set('Authorization', `Bearer ${token}`);
     } else if (this.apiKey !== API_KEY_SENTINEL) {
-      opts.headers['api-key'] = this.apiKey;
+      opts.headers.values.set('api-key', this.apiKey);
     } else {
       throw new Errors.OpenAIError('Unable to handle auth');
     }
@@ -189,6 +190,7 @@ const _deployments_endpoints = new Set([
   '/audio/translations',
   '/audio/speech',
   '/images/generations',
+  '/batches',
 ]);

 const API_KEY_SENTINEL = '<Missing Key>';
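
Seen from the caller's side, the auth branching above resolves as in this sketch; it assumes the AzureOpenAI entry point and @azure/identity's token provider, and the endpoint and apiVersion values are placeholders:

  import { AzureOpenAI } from 'openai';
  import { DefaultAzureCredential, getBearerTokenProvider } from '@azure/identity';

  // With a bearer token provider, prepareOptions sets `Authorization: Bearer <token>`;
  // without one it falls back to the `api-key` header, and throws if neither is available.
  const credential = new DefaultAzureCredential();
  const azureADTokenProvider = getBearerTokenProvider(
    credential,
    'https://cognitiveservices.azure.com/.default',
  );

  const client = new AzureOpenAI({
    azureADTokenProvider,
    apiVersion: '2024-10-21', // placeholder API version
    endpoint: 'https://my-resource.openai.azure.com', // placeholder resource endpoint
  });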

src/lib/AbstractChatCompletionRunner.ts

Lines changed: 4 additions & 4 deletions

@@ -1,4 +1,3 @@
-import * as Core from '../core';
 import { type CompletionUsage } from '../resources/completions';
 import {
   type ChatCompletion,
@@ -24,9 +23,10 @@ import { BaseEvents, EventStream } from './EventStream';
 import { ParsedChatCompletion } from '../resources/beta/chat/completions';
 import OpenAI from '../index';
 import { isAutoParsableTool, parseChatCompletion } from '../lib/parser';
+import { RequestOptions } from '../internal/request-options';

 const DEFAULT_MAX_CHAT_COMPLETIONS = 10;
-export interface RunnerOptions extends Core.RequestOptions {
+export interface RunnerOptions extends RequestOptions {
   /** How many requests to make before canceling. Default 10. */
   maxChatCompletions?: number;
 }
@@ -238,7 +238,7 @@ export class AbstractChatCompletionRunner<
   protected async _createChatCompletion(
     client: OpenAI,
     params: ChatCompletionCreateParams,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<ParsedChatCompletion<ParsedT>> {
     const signal = options?.signal;
     if (signal) {
@@ -258,7 +258,7 @@ export class AbstractChatCompletionRunner<
   protected async _runChatCompletion(
     client: OpenAI,
     params: ChatCompletionCreateParams,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<ChatCompletion> {
     for (const message of params.messages) {
       this._addMessage(message, false);
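
A hedged usage sketch of RunnerOptions after the switch to the internal RequestOptions type; it assumes the beta runTools helper still accepts these options as its second argument, and the model name and empty tool list are purely illustrative:

  import OpenAI from 'openai';

  const client = new OpenAI();

  // maxChatCompletions caps the number of completion round trips (default 10);
  // everything else (timeout, headers, signal, ...) comes from RequestOptions.
  const runner = client.beta.chat.completions.runTools(
    {
      model: 'gpt-4o-mini', // illustrative model name
      messages: [{ role: 'user', content: 'What is the weather in Paris?' }],
      tools: [], // tool definitions omitted for brevity
    },
    { maxChatCompletions: 5, timeout: 60_000 },
  );

  runner.finalContent().then(console.log);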

src/lib/AssistantStream.ts

Lines changed: 15 additions & 18 deletions

@@ -9,8 +9,7 @@ import {
   MessageDelta,
   MessageContent,
 } from '../resources/beta/threads/messages';
-import * as Core from '../core';
-import { RequestOptions } from '../core';
+import { RequestOptions } from '../internal/request-options';
 import {
   Run,
   RunCreateParamsBase,
@@ -19,7 +18,7 @@ import {
   RunSubmitToolOutputsParamsBase,
   RunSubmitToolOutputsParamsStreaming,
 } from '../resources/beta/threads/runs/runs';
-import { type ReadableStream } from '../_shims/index';
+import { type ReadableStream } from '../internal/shim-types';
 import { Stream } from '../streaming';
 import { APIUserAbortError, OpenAIError } from '../error';
 import {
@@ -31,6 +30,7 @@ import {
 import { RunStep, RunStepDelta, ToolCall, ToolCallDelta } from '../resources/beta/threads/runs/steps';
 import { ThreadCreateAndRunParamsBase, Threads } from '../resources/beta/threads/threads';
 import { BaseEvents, EventStream } from './EventStream';
+import { isObj } from '../internal/utils';

 export interface AssistantStreamEvents extends BaseEvents {
   run: (run: Run) => void;
@@ -163,7 +163,7 @@ export class AssistantStream

   protected async _fromReadableStream(
     readableStream: ReadableStream,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<Run> {
     const signal = options?.signal;
     if (signal) {
@@ -187,15 +187,14 @@ export class AssistantStream
   }

   static createToolAssistantStream(
-    threadId: string,
     runId: string,
     runs: Runs,
     params: RunSubmitToolOutputsParamsStream,
     options: RequestOptions | undefined,
   ): AssistantStream {
     const runner = new AssistantStream();
     runner._run(() =>
-      runner._runToolAssistantStream(threadId, runId, runs, params, {
+      runner._runToolAssistantStream(runId, runs, params, {
         ...options,
         headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' },
       }),
@@ -205,10 +204,9 @@ export class AssistantStream

   protected async _createToolAssistantStream(
     run: Runs,
-    threadId: string,
     runId: string,
     params: RunSubmitToolOutputsParamsStream,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<Run> {
     const signal = options?.signal;
     if (signal) {
@@ -217,7 +215,7 @@ export class AssistantStream
     }

     const body: RunSubmitToolOutputsParamsStreaming = { ...params, stream: true };
-    const stream = await run.submitToolOutputs(threadId, runId, body, {
+    const stream = await run.submitToolOutputs(runId, body, {
       ...options,
       signal: this.controller.signal,
     });
@@ -303,7 +301,7 @@ export class AssistantStream
   protected async _createThreadAssistantStream(
     thread: Threads,
     params: ThreadCreateAndRunParamsBase,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<Run> {
     const signal = options?.signal;
     if (signal) {
@@ -330,7 +328,7 @@ export class AssistantStream
     run: Runs,
     threadId: string,
     params: RunCreateParamsBase,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<Run> {
     const signal = options?.signal;
     if (signal) {
@@ -676,7 +674,7 @@ export class AssistantStream
       accValue += deltaValue;
     } else if (typeof accValue === 'number' && typeof deltaValue === 'number') {
       accValue += deltaValue;
-    } else if (Core.isObj(accValue) && Core.isObj(deltaValue)) {
+    } else if (isObj(accValue) && isObj(deltaValue)) {
       accValue = this.accumulateDelta(accValue as Record<string, any>, deltaValue as Record<string, any>);
     } else if (Array.isArray(accValue) && Array.isArray(deltaValue)) {
       if (accValue.every((x) => typeof x === 'string' || typeof x === 'number')) {
@@ -685,7 +683,7 @@ export class AssistantStream
       }

       for (const deltaEntry of deltaValue) {
-        if (!Core.isObj(deltaEntry)) {
+        if (!isObj(deltaEntry)) {
           throw new Error(`Expected array delta entry to be an object but got: ${deltaEntry}`);
         }

@@ -748,7 +746,7 @@ export class AssistantStream
   protected async _threadAssistantStream(
     params: ThreadCreateAndRunParamsBase,
     thread: Threads,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<Run> {
     return await this._createThreadAssistantStream(thread, params, options);
   }
@@ -757,18 +755,17 @@ export class AssistantStream
     threadId: string,
     runs: Runs,
     params: RunCreateParamsBase,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<Run> {
     return await this._createAssistantStream(runs, threadId, params, options);
   }

   protected async _runToolAssistantStream(
-    threadId: string,
     runId: string,
     runs: Runs,
     params: RunSubmitToolOutputsParamsStream,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<Run> {
-    return await this._createToolAssistantStream(runs, threadId, runId, params, options);
+    return await this._createToolAssistantStream(runs, runId, params, options);
   }
 }
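
The merge rule visible in the isObj hunks above can be summarized with this standalone sketch; the names are illustrative, and the real accumulateDelta also handles index-based array merging and SDK-specific keys:

  // Simplified delta accumulation: concatenate strings, add numbers,
  // recurse into plain objects, and otherwise let the delta value win.
  function isPlainObj(value: unknown): value is Record<string, unknown> {
    return typeof value === 'object' && value !== null && !Array.isArray(value);
  }

  function mergeDelta(acc: Record<string, any>, delta: Record<string, any>): Record<string, any> {
    for (const [key, deltaValue] of Object.entries(delta)) {
      const accValue = acc[key];
      if (typeof accValue === 'string' && typeof deltaValue === 'string') {
        acc[key] = accValue + deltaValue;
      } else if (typeof accValue === 'number' && typeof deltaValue === 'number') {
        acc[key] = accValue + deltaValue;
      } else if (isPlainObj(accValue) && isPlainObj(deltaValue)) {
        acc[key] = mergeDelta(accValue, deltaValue);
      } else {
        acc[key] = deltaValue;
      }
    }
    return acc;
  }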

src/lib/ChatCompletionStream.ts

Lines changed: 5 additions & 5 deletions

@@ -1,4 +1,3 @@
-import * as Core from '../core';
 import {
   OpenAIError,
   APIUserAbortError,
@@ -17,7 +16,7 @@ import {
   AbstractChatCompletionRunner,
   type AbstractChatCompletionRunnerEvents,
 } from './AbstractChatCompletionRunner';
-import { type ReadableStream } from '../_shims/index';
+import { type ReadableStream } from '../internal/shim-types';
 import { Stream } from '../streaming';
 import OpenAI from '../index';
 import { ParsedChatCompletion } from '../resources/beta/chat/completions';
@@ -30,6 +29,7 @@ import {
   shouldParseToolCall,
 } from '../lib/parser';
 import { partialParse } from '../_vendor/partial-json-parser/parser';
+import { RequestOptions } from '../internal/request-options';

 export interface ContentDeltaEvent {
   delta: string;
@@ -158,7 +158,7 @@ export class ChatCompletionStream<ParsedT = null>
   static createChatCompletion<ParsedT>(
     client: OpenAI,
     params: ChatCompletionStreamParams,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): ChatCompletionStream<ParsedT> {
     const runner = new ChatCompletionStream<ParsedT>(params as ChatCompletionCreateParamsStreaming);
     runner._run(() =>
@@ -368,7 +368,7 @@ export class ChatCompletionStream<ParsedT = null>
   protected override async _createChatCompletion(
     client: OpenAI,
     params: ChatCompletionCreateParams,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<ParsedChatCompletion<ParsedT>> {
     super._createChatCompletion;
     const signal = options?.signal;
@@ -394,7 +394,7 @@ export class ChatCompletionStream<ParsedT = null>

   protected async _fromReadableStream(
     readableStream: ReadableStream,
-    options?: Core.RequestOptions,
+    options?: RequestOptions,
   ): Promise<ChatCompletion> {
     const signal = options?.signal;
     if (signal) {

src/lib/ChatCompletionStreamingRunner.ts

Lines changed: 1 addition & 1 deletion

@@ -3,7 +3,7 @@ import {
   type ChatCompletionCreateParamsStreaming,
 } from '../resources/chat/completions';
 import { RunnerOptions, type AbstractChatCompletionRunnerEvents } from './AbstractChatCompletionRunner';
-import { type ReadableStream } from '../_shims/index';
+import { type ReadableStream } from '../internal/shim-types';
 import { RunnableTools, type BaseFunctionsArgs, type RunnableFunctions } from './RunnableFunction';
 import { ChatCompletionSnapshot, ChatCompletionStream } from './ChatCompletionStream';
 import OpenAI from '../index';

src/pagination.ts

Lines changed: 8 additions & 2 deletions

@@ -3,7 +3,7 @@
 import type { OpenAI } from './client';
 import { OpenAIError } from './error';
 import { FinalRequestOptions } from './internal/request-options';
-import { defaultParseResponse } from './internal/parse';
+import { defaultParseResponse, WithRequestID } from './internal/parse';
 import { APIPromise } from './api-promise';
 import { type APIResponseProps } from './internal/parse';
 import { maybeObj } from './internal/utils/values';
@@ -87,7 +87,13 @@ export class PagePromise<
   ) {
     super(
       request,
-      async (props) => new Page(client, props.response, await defaultParseResponse(props), props.options),
+      async (props) =>
+        new Page(
+          client,
+          props.response,
+          await defaultParseResponse(props),
+          props.options,
+        ) as WithRequestID<PageClass>,
     );
   }

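
For reference, PagePromise backs the SDK's auto-pagination helpers; a minimal consumer-side sketch, where the list endpoint and parameters are illustrative:

  import OpenAI from 'openai';

  const client = new OpenAI();

  async function listAllJobs() {
    // PagePromise is async-iterable: `for await` walks every item and fetches
    // the next page transparently when the current one is exhausted.
    for await (const job of client.fineTuning.jobs.list({ limit: 20 })) {
      console.log(job.id, job.status);
    }
  }

  listAllJobs();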
