diff --git a/packages/sdk/server-ai/__tests__/Judge.test.ts b/packages/sdk/server-ai/__tests__/Judge.test.ts index 1144f119f..ed04f1fc4 100644 --- a/packages/sdk/server-ai/__tests__/Judge.test.ts +++ b/packages/sdk/server-ai/__tests__/Judge.test.ts @@ -52,7 +52,6 @@ describe('Judge', () => { provider: { name: 'openai' }, tracker: mockTracker, evaluationMetricKeys: ['relevance', 'accuracy', 'helpfulness'], - toVercelAISDK: jest.fn(), }; }); diff --git a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts index 7729dcc32..2f99ed3a4 100644 --- a/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts +++ b/packages/sdk/server-ai/__tests__/LDAIClientImpl.test.ts @@ -262,15 +262,11 @@ describe('completionConfig method', () => { messages: [], tracker: {} as any, enabled: true, - toVercelAISDK: jest.fn(), }; const evaluateSpy = jest.spyOn(client as any, '_evaluate'); evaluateSpy.mockResolvedValue(mockConfig); - const addVercelSpy = jest.spyOn(client as any, '_addVercelAISDKSupport'); - addVercelSpy.mockReturnValue({ ...mockConfig, toVercelAISDK: jest.fn() }); - const result = await client.completionConfig(key, testContext, defaultValue, variables); expect(mockLdClient.track).toHaveBeenCalledWith( @@ -286,10 +282,8 @@ describe('completionConfig method', () => { 'completion', variables, ); - expect(addVercelSpy).toHaveBeenCalled(); expect(result).toBeDefined(); evaluateSpy.mockRestore(); - addVercelSpy.mockRestore(); }); }); diff --git a/packages/sdk/server-ai/__tests__/LDAIConfigMapper.test.ts b/packages/sdk/server-ai/__tests__/LDAIConfigMapper.test.ts deleted file mode 100644 index ddee1b26f..000000000 --- a/packages/sdk/server-ai/__tests__/LDAIConfigMapper.test.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { LDMessage, VercelAISDKMapOptions } from '../src/api/config'; -import { LDAIConfigMapper } from '../src/LDAIConfigMapper'; - -describe('_findParameter', () => { - it('handles undefined model and messages', () => { - const mapper = new LDAIConfigMapper(); - // eslint-disable-next-line @typescript-eslint/dot-notation - expect(mapper['_findParameter']('test-param')).toBeUndefined(); - }); - - it('handles parameter not found', () => { - const mapper = new LDAIConfigMapper({ - name: 'test-ai-model', - parameters: { - 'test-param': 123, - }, - custom: { - 'test-param': 456, - }, - }); - // eslint-disable-next-line @typescript-eslint/dot-notation - expect(mapper['_findParameter']('other-param')).toBeUndefined(); - }); - - it('finds parameter from single model parameter', () => { - const mapper = new LDAIConfigMapper({ - name: 'test-ai-model', - parameters: { - 'test-param': 123, - }, - }); - // eslint-disable-next-line @typescript-eslint/dot-notation - expect(mapper['_findParameter']('test-param')).toEqual(123); - }); - - it('finds parameter from multiple model parameters', () => { - const mapper = new LDAIConfigMapper({ - name: 'test-ai-model', - parameters: { - testParam: 123, - }, - }); - // eslint-disable-next-line @typescript-eslint/dot-notation - expect(mapper['_findParameter']('test-param', 'testParam')).toEqual(123); - }); - - it('finds parameter from single model custom parameter', () => { - const mapper = new LDAIConfigMapper({ - name: 'test-ai-model', - custom: { - 'test-param': 123, - }, - }); - // eslint-disable-next-line @typescript-eslint/dot-notation - expect(mapper['_findParameter']('test-param')).toEqual(123); - }); - - it('finds parameter from multiple model custom parameters', () => { - const mapper = new 
LDAIConfigMapper({ - name: 'test-ai-model', - custom: { - testParam: 123, - }, - }); - // eslint-disable-next-line @typescript-eslint/dot-notation - expect(mapper['_findParameter']('test-param', 'testParam')).toEqual(123); - }); - - it('gives precedence to model parameters over model custom parameters', () => { - const mapper = new LDAIConfigMapper({ - name: 'test-ai-model', - parameters: { - 'test-param': 123, - }, - custom: { - 'test-param': 456, - }, - }); - // eslint-disable-next-line @typescript-eslint/dot-notation - expect(mapper['_findParameter']('test-param', 'testParam')).toEqual(123); - }); -}); - -describe('toVercelAIAISDK', () => { - const mockModel = { name: 'mockModel' }; - const mockMessages: LDMessage[] = [ - { role: 'user', content: 'test prompt' }, - { role: 'system', content: 'test instruction' }, - ]; - const mockOptions: VercelAISDKMapOptions = { - nonInterpolatedMessages: [{ role: 'assistant', content: 'test assistant instruction' }], - }; - const mockProvider = jest.fn().mockReturnValue(mockModel); - - beforeEach(() => { - jest.clearAllMocks(); - }); - - it('handles undefined model and messages', () => { - const mapper = new LDAIConfigMapper(); - const result = mapper.toVercelAISDK(mockProvider); - - expect(mockProvider).toHaveBeenCalledWith(''); - expect(result).toEqual( - expect.objectContaining({ - model: mockModel, - messages: undefined, - }), - ); - }); - - it('uses additional messages', () => { - const mapper = new LDAIConfigMapper({ name: 'test-ai-model' }); - const result = mapper.toVercelAISDK(mockProvider, mockOptions); - - expect(mockProvider).toHaveBeenCalledWith('test-ai-model'); - expect(result).toEqual( - expect.objectContaining({ - model: mockModel, - messages: mockOptions.nonInterpolatedMessages, - }), - ); - }); - - it('combines config messages and additional messages', () => { - const mapper = new LDAIConfigMapper({ name: 'test-ai-model' }, undefined, mockMessages); - const result = mapper.toVercelAISDK(mockProvider, mockOptions); - - expect(mockProvider).toHaveBeenCalledWith('test-ai-model'); - expect(result).toEqual( - expect.objectContaining({ - model: mockModel, - messages: [...mockMessages, ...(mockOptions.nonInterpolatedMessages ?? 
[])], - }), - ); - }); - - it('requests parameters correctly', () => { - const mapper = new LDAIConfigMapper({ name: 'test-ai-model' }, undefined, mockMessages); - const findParameterMock = jest.spyOn(mapper as any, '_findParameter'); - const result = mapper.toVercelAISDK(mockProvider); - - expect(mockProvider).toHaveBeenCalledWith('test-ai-model'); - expect(result).toEqual( - expect.objectContaining({ - model: mockModel, - messages: mockMessages, - }), - ); - expect(findParameterMock).toHaveBeenCalledWith('max_tokens', 'maxTokens'); - expect(findParameterMock).toHaveBeenCalledWith('temperature'); - expect(findParameterMock).toHaveBeenCalledWith('top_p', 'topP'); - expect(findParameterMock).toHaveBeenCalledWith('top_k', 'topK'); - expect(findParameterMock).toHaveBeenCalledWith('presence_penalty', 'presencePenalty'); - expect(findParameterMock).toHaveBeenCalledWith('frequency_penalty', 'frequencyPenalty'); - expect(findParameterMock).toHaveBeenCalledWith('stop', 'stop_sequences', 'stopSequences'); - expect(findParameterMock).toHaveBeenCalledWith('seed'); - }); -}); diff --git a/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts b/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts index a70fce0b1..1cf9f839b 100644 --- a/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts +++ b/packages/sdk/server-ai/__tests__/LDAIConfigTrackerImpl.test.ts @@ -467,257 +467,6 @@ describe('Vercel AI SDK generateText', () => { }); }); -describe('Vercel AI SDK streamText', () => { - it('tracks Vercel AI SDK usage', async () => { - const tracker = new LDAIConfigTrackerImpl( - mockLdClient, - configKey, - variationKey, - version, - modelName, - providerName, - testContext, - ); - jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000); - - const TOTAL_TOKENS = 100; - const PROMPT_TOKENS = 49; - const COMPLETION_TOKENS = 51; - - let resolveDone: ((value: boolean) => void) | undefined; - const donePromise = new Promise((resolve) => { - resolveDone = resolve; - }); - - const finishReason = Promise.resolve('stop'); - jest - .spyOn(finishReason, 'then') - .mockImplementationOnce((fn) => finishReason.then(fn).finally(() => resolveDone?.(true))); - - tracker.trackVercelAISDKStreamTextMetrics(() => ({ - finishReason, - usage: Promise.resolve({ - totalTokens: TOTAL_TOKENS, - promptTokens: PROMPT_TOKENS, - completionTokens: COMPLETION_TOKENS, - }), - })); - - await donePromise; - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:duration:total', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1000, - ); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:generation:success', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1, - ); - - expect(mockTrack).not.toHaveBeenCalledWith( - '$ld:ai:generation:error', - expect.anything(), - expect.anything(), - expect.anything(), - ); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:tokens:total', - testContext, - { configKey, variationKey, version, modelName, providerName }, - TOTAL_TOKENS, - ); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:tokens:input', - testContext, - { configKey, variationKey, version, modelName, providerName }, - PROMPT_TOKENS, - ); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:tokens:output', - testContext, - { configKey, variationKey, version, modelName, providerName }, - COMPLETION_TOKENS, - ); - }); - - it('tracks error when Vercel AI SDK metrics function throws', async () => { - const tracker = new 
LDAIConfigTrackerImpl( - mockLdClient, - configKey, - variationKey, - version, - modelName, - providerName, - testContext, - ); - jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000); - - const error = new Error('Vercel AI SDK API error'); - expect(() => - tracker.trackVercelAISDKStreamTextMetrics(() => { - throw error; - }), - ).toThrow(error); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:duration:total', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1000, - ); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:generation:error', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1, - ); - - expect(mockTrack).not.toHaveBeenCalledWith( - expect.stringMatching(/^\$ld:ai:tokens:/), - expect.anything(), - expect.anything(), - expect.anything(), - ); - }); - - it('tracks error when Vercel AI SDK finishes because of an error', async () => { - const tracker = new LDAIConfigTrackerImpl( - mockLdClient, - configKey, - variationKey, - version, - modelName, - providerName, - testContext, - ); - jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000); - - tracker.trackVercelAISDKStreamTextMetrics(() => ({ - finishReason: Promise.resolve('error'), - })); - - await new Promise(process.nextTick); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:duration:total', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1000, - ); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:generation:error', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1, - ); - - expect(mockTrack).not.toHaveBeenCalledWith( - expect.stringMatching(/^\$ld:ai:tokens:/), - expect.anything(), - expect.anything(), - expect.anything(), - ); - }); - - it('tracks error when Vercel AI SDK finishReason promise rejects', async () => { - const tracker = new LDAIConfigTrackerImpl( - mockLdClient, - configKey, - variationKey, - version, - modelName, - providerName, - testContext, - ); - jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000); - - tracker.trackVercelAISDKStreamTextMetrics(() => ({ - finishReason: Promise.reject(new Error('Vercel AI SDK API error')), - })); - - await new Promise(process.nextTick); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:duration:total', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1000, - ); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:generation:error', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1, - ); - - expect(mockTrack).not.toHaveBeenCalledWith( - expect.stringMatching(/^\$ld:ai:tokens:/), - expect.anything(), - expect.anything(), - expect.anything(), - ); - }); - - it('squashes error when Vercel AI SDK usage promise rejects', async () => { - const tracker = new LDAIConfigTrackerImpl( - mockLdClient, - configKey, - variationKey, - version, - modelName, - providerName, - testContext, - ); - jest.spyOn(global.Date, 'now').mockReturnValueOnce(1000).mockReturnValueOnce(2000); - - tracker.trackVercelAISDKStreamTextMetrics(() => ({ - finishReason: Promise.resolve('stop'), - usage: Promise.reject(new Error('Vercel AI SDK API error')), - })); - - await new Promise(process.nextTick); - - expect(mockTrack).toHaveBeenCalledWith( - '$ld:ai:duration:total', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1000, - ); - - expect(mockTrack).toHaveBeenCalledWith( - 
'$ld:ai:generation:success', - testContext, - { configKey, variationKey, version, modelName, providerName }, - 1, - ); - - expect(mockTrack).not.toHaveBeenCalledWith( - '$ld:ai:generation:error', - expect.anything(), - expect.anything(), - expect.anything(), - ); - - expect(mockTrack).not.toHaveBeenCalledWith( - expect.stringMatching(/^\$ld:ai:tokens:/), - expect.anything(), - expect.anything(), - expect.anything(), - ); - }); -}); - it('tracks tokens', () => { const tracker = new LDAIConfigTrackerImpl( mockLdClient, diff --git a/packages/sdk/server-ai/__tests__/TrackedChat.test.ts b/packages/sdk/server-ai/__tests__/TrackedChat.test.ts index b972a7343..e70835e4f 100644 --- a/packages/sdk/server-ai/__tests__/TrackedChat.test.ts +++ b/packages/sdk/server-ai/__tests__/TrackedChat.test.ts @@ -38,7 +38,6 @@ describe('TrackedChat', () => { model: { name: 'gpt-4' }, provider: { name: 'openai' }, tracker: mockTracker, - toVercelAISDK: jest.fn(), }; }); diff --git a/packages/sdk/server-ai/src/LDAIClientImpl.ts b/packages/sdk/server-ai/src/LDAIClientImpl.ts index e9464a988..54cb054fe 100644 --- a/packages/sdk/server-ai/src/LDAIClientImpl.ts +++ b/packages/sdk/server-ai/src/LDAIClientImpl.ts @@ -16,15 +16,11 @@ import { LDAIJudgeConfigDefault, LDJudge, LDMessage, - VercelAISDKConfig, - VercelAISDKMapOptions, - VercelAISDKProvider, } from './api/config'; import { LDAIConfigFlagValue, LDAIConfigUtils } from './api/config/LDAIConfigUtils'; import { Judge } from './api/judge/Judge'; import { LDAIClient } from './api/LDAIClient'; import { AIProviderFactory, SupportedAIProvider } from './api/providers'; -import { LDAIConfigMapper } from './LDAIConfigMapper'; import { LDAIConfigTrackerImpl } from './LDAIConfigTrackerImpl'; import { LDClientMin } from './LDClientMin'; @@ -115,19 +111,6 @@ export class LDAIClientImpl implements LDAIClient { return config; } - private _addVercelAISDKSupport(config: LDAICompletionConfig): LDAICompletionConfig { - const { messages } = config; - const mapper = new LDAIConfigMapper(config.model, config.provider, messages); - - return { - ...config, - toVercelAISDK: ( - sdkProvider: VercelAISDKProvider | Record>, - options?: VercelAISDKMapOptions | undefined, - ): VercelAISDKConfig => mapper.toVercelAISDK(sdkProvider, options), - }; - } - private async _initializeJudges( judgeConfigs: LDJudge[], context: LDContext, @@ -166,7 +149,7 @@ export class LDAIClientImpl implements LDAIClient { this._ldClient.track(TRACK_CONFIG_SINGLE, context, key, 1); const config = await this._evaluate(key, context, defaultValue, 'completion', variables); - return this._addVercelAISDKSupport(config as LDAICompletionConfig); + return config as LDAICompletionConfig; } /** diff --git a/packages/sdk/server-ai/src/LDAIConfigMapper.ts b/packages/sdk/server-ai/src/LDAIConfigMapper.ts deleted file mode 100644 index 827f4d01e..000000000 --- a/packages/sdk/server-ai/src/LDAIConfigMapper.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { - LDMessage, - LDModelConfig, - LDProviderConfig, - VercelAISDKConfig, - VercelAISDKMapOptions, - VercelAISDKProvider, -} from './api/config'; - -export class LDAIConfigMapper { - constructor( - private _model?: LDModelConfig | undefined, - private _provider?: LDProviderConfig | undefined, - private _messages?: LDMessage[] | undefined, - ) {} - - private _findParameter(...paramNames: string[]): T | undefined { - for (let i = 0; i < paramNames.length; i += 1) { - const paramName = paramNames[i]; - if (this._model?.parameters?.[paramName] !== undefined) { - return 
this._model?.parameters?.[paramName] as T; - } - if (this._model?.custom?.[paramName] !== undefined) { - return this._model?.custom?.[paramName] as T; - } - } - return undefined; - } - - /** - * @deprecated Use `VercelProvider.toVercelAISDK()` from the `@launchdarkly/server-sdk-ai-vercel` package instead. - * This method will be removed in a future version. - */ - toVercelAISDK( - provider: VercelAISDKProvider | Record>, - options?: VercelAISDKMapOptions | undefined, - ): VercelAISDKConfig { - let model: TMod | undefined; - if (typeof provider === 'function') { - model = provider(this._model?.name ?? ''); - } else { - model = provider[this._provider?.name ?? '']?.(this._model?.name ?? ''); - } - if (!model) { - throw new Error( - 'Vercel AI SDK model cannot be determined from the supplied provider parameter.', - ); - } - - let messages: LDMessage[] | undefined; - if (this._messages || options?.nonInterpolatedMessages) { - messages = [...(this._messages ?? []), ...(options?.nonInterpolatedMessages ?? [])]; - } - - return { - model, - messages, - maxTokens: this._findParameter('max_tokens', 'maxTokens'), - maxOutputTokens: this._findParameter('max_tokens', 'maxTokens'), - temperature: this._findParameter('temperature'), - topP: this._findParameter('top_p', 'topP'), - topK: this._findParameter('top_k', 'topK'), - presencePenalty: this._findParameter('presence_penalty', 'presencePenalty'), - frequencyPenalty: this._findParameter('frequency_penalty', 'frequencyPenalty'), - stopSequences: this._findParameter('stop', 'stop_sequences', 'stopSequences'), - seed: this._findParameter('seed'), - }; - } -} diff --git a/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts b/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts index c55051f4c..9990c8505 100644 --- a/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts +++ b/packages/sdk/server-ai/src/LDAIConfigTrackerImpl.ts @@ -250,52 +250,6 @@ export class LDAIConfigTrackerImpl implements LDAIConfigTracker { } } - trackVercelAISDKStreamTextMetrics< - TRes extends { - finishReason?: Promise; - usage?: Promise<{ - totalTokens?: number; - inputTokens?: number; - promptTokens?: number; - outputTokens?: number; - completionTokens?: number; - }>; - }, - >(func: () => TRes): TRes { - const startTime = Date.now(); - try { - const result = func(); - result.finishReason - ?.then(async (finishReason) => { - const endTime = Date.now(); - this.trackDuration(endTime - startTime); - if (finishReason === 'error') { - this.trackError(); - } else { - this.trackSuccess(); - if (result.usage) { - try { - this.trackTokens(createVercelAISDKTokenUsage(await result.usage)); - } catch { - // Intentionally squashing this error - } - } - } - }) - .catch(() => { - const endTime = Date.now(); - this.trackDuration(endTime - startTime); - this.trackError(); - }); - return result; - } catch (err) { - const endTime = Date.now(); - this.trackDuration(endTime - startTime); - this.trackError(); - throw err; - } - } - trackTokens(tokens: LDTokenUsage): void { this._trackedMetrics.tokens = tokens; const trackData = this.getTrackData(); diff --git a/packages/sdk/server-ai/src/api/config/LDAIConfigTracker.ts b/packages/sdk/server-ai/src/api/config/LDAIConfigTracker.ts index ed1eed4d3..130a8d041 100644 --- a/packages/sdk/server-ai/src/api/config/LDAIConfigTracker.ts +++ b/packages/sdk/server-ai/src/api/config/LDAIConfigTracker.ts @@ -220,36 +220,6 @@ export interface LDAIConfigTracker { func: () => Promise, ): Promise; - /** - * Track a Vercel AI SDK streamText operation. 
- * - * This function will track the duration of the operation, the token usage, and the success or error status. - * - * If the provided function throws, then this method will also throw. - * In the case the provided function throws, this function will record the duration and an error. - * A failed operation will not have any token usage data. - * - * @deprecated Use `trackStreamMetricsOf()` with `VercelProvider.createStreamMetricsExtractor()` from the - * `@launchdarkly/server-sdk-ai-vercel` package instead. This method will be removed in a future version. - * - * @param func Function which executes the operation. - * @returns The result of the operation. - */ - trackVercelAISDKStreamTextMetrics< - TRes extends { - finishReason?: Promise; - usage?: Promise<{ - totalTokens?: number; - inputTokens?: number; - promptTokens?: number; - outputTokens?: number; - completionTokens?: number; - }>; - }, - >( - func: () => TRes, - ): TRes; - /** * Get a summary of the tracked metrics. */ diff --git a/packages/sdk/server-ai/src/api/config/VercelAISDK.ts b/packages/sdk/server-ai/src/api/config/VercelAISDK.ts deleted file mode 100644 index 290ae45f7..000000000 --- a/packages/sdk/server-ai/src/api/config/VercelAISDK.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { type LDMessage } from './types'; - -/** - * @deprecated Use `VercelAISDKProvider` from the `@launchdarkly/server-sdk-ai-vercel` package instead. - * This type will be removed in a future version. - */ -export type VercelAISDKProvider = (modelName: string) => TMod; - -/** - * @deprecated Use `VercelAISDKMapOptions` from the `@launchdarkly/server-sdk-ai-vercel` package instead. - * This type will be removed in a future version. - */ -export interface VercelAISDKMapOptions { - nonInterpolatedMessages?: LDMessage[] | undefined; -} - -/** - * @deprecated Use `VercelAISDKConfig` from the `@launchdarkly/server-sdk-ai-vercel` package instead. - * This type will be removed in a future version. - */ -export interface VercelAISDKConfig { - model: TMod; - messages?: LDMessage[] | undefined; - maxTokens?: number | undefined; - maxOutputTokens?: number | undefined; - temperature?: number | undefined; - topP?: number | undefined; - topK?: number | undefined; - presencePenalty?: number | undefined; - frequencyPenalty?: number | undefined; - stopSequences?: string[] | undefined; - seed?: number | undefined; -} diff --git a/packages/sdk/server-ai/src/api/config/index.ts b/packages/sdk/server-ai/src/api/config/index.ts index 54f799a13..e35ffba15 100644 --- a/packages/sdk/server-ai/src/api/config/index.ts +++ b/packages/sdk/server-ai/src/api/config/index.ts @@ -1,4 +1,3 @@ export * from './types'; // LDAIConfigUtils is intentionally not exported - it's an internal utility class -export * from './VercelAISDK'; export { LDAIConfigTracker } from './LDAIConfigTracker'; diff --git a/packages/sdk/server-ai/src/api/config/types.ts b/packages/sdk/server-ai/src/api/config/types.ts index 9e2ebd01d..d6682676e 100644 --- a/packages/sdk/server-ai/src/api/config/types.ts +++ b/packages/sdk/server-ai/src/api/config/types.ts @@ -1,5 +1,4 @@ import { LDAIConfigTracker } from './LDAIConfigTracker'; -import { VercelAISDKConfig, VercelAISDKMapOptions, VercelAISDKProvider } from './VercelAISDK'; // ============================================================================ // Foundation Types @@ -106,25 +105,6 @@ export interface LDAIConfig extends Omit { * Undefined for disabled configs. 
*/ tracker?: LDAIConfigTracker; - - /** - * Maps this AI config to a format usable direcly in Vercel AI SDK generateText() - * and streamText() methods. - * - * WARNING: this method can throw an exception if a Vercel AI SDK model cannot be determined. - * - * @deprecated Use `VercelProvider.toVercelAISDK()` from the `@launchdarkly/server-sdk-ai-vercel` package instead. - * This method will be removed in a future version. - * - * @param provider A Vercel AI SDK Provider or a map of provider names to Vercel AI SDK Providers. - * @param options Optional mapping options. - * @returns A configuration directly usable in Vercel AI SDK generateText() and streamText() - * @throws {Error} if a Vercel AI SDK model cannot be determined from the given provider parameter. - */ - toVercelAISDK?: ( - provider: VercelAISDKProvider | Record>, - options?: VercelAISDKMapOptions | undefined, - ) => VercelAISDKConfig; } // ============================================================================
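
Migration note: with `toVercelAISDK` removed from the evaluated completion config, callers map a config into Vercel AI SDK options through the standalone `@launchdarkly/server-sdk-ai-vercel` package instead. Below is a minimal before/after sketch. The replacement's exact signature is not shown in this diff (the deprecation notices only name `VercelProvider.toVercelAISDK()`), so treating it as taking the evaluated config plus the same provider argument the removed method took is an assumption, as are the `aiClient`, `key`, `context`, and `defaultValue` bindings.

```ts
import { generateText } from 'ai';
import { openai } from '@ai-sdk/openai';
// Assumed import: the deprecation notices name the package, not its export shapes.
import { VercelProvider } from '@launchdarkly/server-sdk-ai-vercel';

const config = await aiClient.completionConfig(key, context, defaultValue);

// Before this change, the mapping lived on the config itself:
//   const vercelConfig = config.toVercelAISDK(openai);

// After (assumed shape of the replacement helper):
const vercelConfig = VercelProvider.toVercelAISDK(config, openai);
const { text } = await generateText(vercelConfig);
```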
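
The streaming tracker goes the same way: `trackVercelAISDKStreamTextMetrics()` is deleted, and its deprecation notice points at `trackStreamMetricsOf()` combined with `VercelProvider.createStreamMetricsExtractor()`. The sketch below reuses `vercelConfig` from the previous sketch and the evaluated config's `tracker`, and assumes a (stream function, extractor) argument order with a zero-argument extractor factory; neither is confirmed by this diff.

```ts
import { streamText } from 'ai';
import { VercelProvider } from '@launchdarkly/server-sdk-ai-vercel';

// Before this change:
//   const result = tracker.trackVercelAISDKStreamTextMetrics(() =>
//     streamText({ model: vercelConfig.model, messages: vercelConfig.messages }),
//   );

// After (assumed argument order for the replacement):
const result = tracker.trackStreamMetricsOf(
  () => streamText({ model: vercelConfig.model, messages: vercelConfig.messages }),
  VercelProvider.createStreamMetricsExtractor(),
);
```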
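
Type-only consumers are covered too: the three helpers deleted from `api/config/VercelAISDK.ts` keep their names in the new package per their deprecation notices, so migration is an import-path swap.

```ts
// Before: imported from the core package, '@launchdarkly/server-sdk-ai'.
import type {
  VercelAISDKConfig,
  VercelAISDKMapOptions,
  VercelAISDKProvider,
} from '@launchdarkly/server-sdk-ai-vercel';
```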