diff --git a/package.json b/package.json index 313d3cf351..daed94409c 100644 --- a/package.json +++ b/package.json @@ -34,6 +34,8 @@ "packages/sdk/server-ai/examples/bedrock", "packages/sdk/server-ai/examples/openai", "packages/sdk/server-ai/examples/vercel-ai", + "packages/sdk/cloudflare-ai", + "packages/sdk/cloudflare-ai/example", "packages/telemetry/browser-telemetry", "contract-tests", "packages/sdk/combined-browser" diff --git a/packages/sdk/cloudflare-ai/.eslintignore b/packages/sdk/cloudflare-ai/.eslintignore new file mode 100644 index 0000000000..ad5d5054b2 --- /dev/null +++ b/packages/sdk/cloudflare-ai/.eslintignore @@ -0,0 +1,2 @@ +example + diff --git a/packages/sdk/cloudflare-ai/CHANGELOG.md b/packages/sdk/cloudflare-ai/CHANGELOG.md new file mode 100644 index 0000000000..895cd6b084 --- /dev/null +++ b/packages/sdk/cloudflare-ai/CHANGELOG.md @@ -0,0 +1,39 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [Unreleased] + +### Added +- Initial MVP release +- AI Config support for Cloudflare Workers AI +- Model name aliasing and resolution +- Template-based prompt interpolation with Mustache +- Comprehensive metrics tracking +- TypeScript type definitions +- Support for 15+ common AI models +- CloudflareAIModelMapper for model ID resolution +- LDAIClient with config method +- LDAIConfigTracker for analytics +- Basic test coverage +- Documentation and examples + +## [0.1.0] - 2025-10-07 + +### Added +- Initial alpha release of `@launchdarkly/cloudflare-server-sdk-ai` +- `initAi(ldClient, clientSideID?, kvNamespace?)` initializer +- `aiClient.config(key, context, defaultValue, variables?)` +- `config.toCloudflareWorkersAI(options?)` for mapping to Workers AI format + +- Template interpolation for messages using variables +- Metrics tracking: success, duration, token usage, optional error tracking +- Example Cloudflare Worker and setup docs + +### Notes +- Requires LaunchDarkly Cloudflare KV integration to be enabled +- Tested with Workers AI [ai] binding and `nodejs_compat` flag + diff --git a/packages/sdk/cloudflare-ai/LICENSE b/packages/sdk/cloudflare-ai/LICENSE new file mode 100644 index 0000000000..ee545c3d2e --- /dev/null +++ b/packages/sdk/cloudflare-ai/LICENSE @@ -0,0 +1,70 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +(a) You must give any other recipients of the Work or Derivative Works a copy of this License; and + +(b) You must cause any modified files to carry prominent notices stating that You changed the files; and + +(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +Copyright 2025 LaunchDarkly + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ diff --git a/packages/sdk/cloudflare-ai/README.md b/packages/sdk/cloudflare-ai/README.md new file mode 100644 index 0000000000..c74f21bd7f --- /dev/null +++ b/packages/sdk/cloudflare-ai/README.md @@ -0,0 +1,116 @@ +# LaunchDarkly AI SDK for Cloudflare Workers + +# ⛔️⛔️⛔️⛔️ + +> [!CAUTION] +> This library is an alpha version and should not be considered ready for production use while this message is visible. + +# ☝️☝️☝️☝️☝️☝️ + +## LaunchDarkly overview + +[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/home/getting-started) using LaunchDarkly today! + +[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) + +## Quick start + +Assumes you’ve installed the LaunchDarkly Cloudflare server SDK and enabled KV. 
+ +1) Install: + +```shell +npm install @launchdarkly/cloudflare-server-sdk-ai --save +``` + +2) Configure `wrangler.toml`: + +```toml +compatibility_flags = ["nodejs_compat"] + +[ai] +binding = "AI" +``` + +3) Use in a Worker: + +```typescript +import { init } from '@launchdarkly/cloudflare-server-sdk'; +import { initAi } from '@launchdarkly/cloudflare-server-sdk-ai'; + +export default { + async fetch(_request, env, ctx) { + const ldClient = init(env.LD_CLIENT_ID, env.LD_KV, { sendEvents: true }); + await ldClient.waitForInitialization(); + + const ai = initAi(ldClient, { clientSideID: env.LD_CLIENT_ID, kvNamespace: env.LD_KV }); + const context = { kind: 'user', key: 'example-user' }; + + const config = await ai.config( + 'my-ai-config', + context, + { enabled: false, model: { name: '@cf/meta/llama-3-8b-instruct' } }, + { username: 'Sandy' }, + ); + + if (!config.enabled) return new Response('AI disabled', { status: 503 }); + + const wc = config.toWorkersAI(env.AI); + const result = await config.tracker.trackWorkersAIMetrics(() => env.AI.run(wc.model, wc)); + + ctx.waitUntil(ldClient.flush().finally(() => ldClient.close())); + return Response.json(result); + } +}; +``` + +See `example/` for a full working sample. + +## API (brief) + +• `initAi(ldClient, options?)` → `LDAIClient` +• `aiClient.config(key, context, defaultValue, variables?)` → `Promise` +• `aiClient.agent(key, context, defaultValue, variables?)` → `Promise` +• `aiClient.agents(configs, context)` → `Promise>` +• `config.toWorkersAI(env.AI, options?)` → `WorkersAIConfig` + +Metrics: + +```typescript +await config.tracker.trackWorkersAIMetrics(() => env.AI.run(wc.model, wc)); +const stream = config.tracker.trackWorkersAIStreamMetrics(() => env.AI.run(wc.model, { ...wc, stream: true })); +``` + +Notes: +- Templates use Mustache. Variables you pass plus the LD context via `{{ldctx.*}}` are available. 
+- Parameter names are normalized when mapping to Workers AI (e.g., `maxTokens`/`maxtokens` → `max_tokens`, `topP`/`topp` → `top_p`, `topK`/`topk` → `top_k`). + + +## Contributing + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK. + +## About LaunchDarkly + +- LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: + - Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. + - Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). + - Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. + - Grant access to certain features based on user attributes, like payment plan (eg: users on the 'gold' plan get access to more features than users in the 'silver' plan). + - Disable parts of your application to facilitate maintenance, without taking everything offline. +- LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/sdk) for a complete list. 
+- Explore LaunchDarkly + - [launchdarkly.com](https://www.launchdarkly.com/ 'LaunchDarkly Main Website') for more information + - [docs.launchdarkly.com](https://docs.launchdarkly.com/ 'LaunchDarkly Documentation') for our documentation and SDK reference guides + - [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ 'LaunchDarkly API Documentation') for our API documentation + - [blog.launchdarkly.com](https://blog.launchdarkly.com/ 'LaunchDarkly Blog Documentation') for the latest product updates + +[cf-ai-sdk-ci-badge]: https://github.com/launchdarkly/js-core/actions/workflows/cloudflare-ai.yml/badge.svg +[cf-ai-sdk-ci]: https://github.com/launchdarkly/js-core/actions/workflows/cloudflare-ai.yml +[cf-ai-sdk-npm-badge]: https://img.shields.io/npm/v/@launchdarkly/cloudflare-server-sdk-ai.svg?style=flat-square +[cf-ai-sdk-npm-link]: https://www.npmjs.com/package/@launchdarkly/cloudflare-server-sdk-ai +[cf-ai-sdk-ghp-badge]: https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8 +[cf-ai-sdk-ghp-link]: https://launchdarkly.github.io/js-core/packages/sdk/cloudflare-ai/docs/ +[cf-ai-sdk-dm-badge]: https://img.shields.io/npm/dm/@launchdarkly/cloudflare-server-sdk-ai.svg?style=flat-square +[cf-ai-sdk-dt-badge]: https://img.shields.io/npm/dt/@launchdarkly/cloudflare-server-sdk-ai.svg?style=flat-square + diff --git a/packages/sdk/cloudflare-ai/__tests__/LDAIClient.test.ts b/packages/sdk/cloudflare-ai/__tests__/LDAIClient.test.ts new file mode 100644 index 0000000000..86eb4edd7f --- /dev/null +++ b/packages/sdk/cloudflare-ai/__tests__/LDAIClient.test.ts @@ -0,0 +1,173 @@ +import { LDAIClientImpl } from '../src/LDAIClientImpl'; +import type { LDClientMin } from '../src/LDClientMin'; + +describe('LDAIClient', () => { + let mockLDClient: jest.Mocked; + let aiClient: LDAIClientImpl; + + beforeEach(() => { + mockLDClient = { + variation: jest.fn(), + variationDetail: jest.fn(), + track: jest.fn(), + }; + aiClient = new 
LDAIClientImpl(mockLDClient); + }); + + describe('config', () => { + it('retrieves config from LaunchDarkly', async () => { + mockLDClient.variationDetail.mockResolvedValue({ + value: { + model: { name: '@cf/meta/llama-3.3-70b-instruct-fp8-fast' }, + messages: [{ role: 'user', content: 'Hello' }], + // eslint-disable-next-line no-underscore-dangle + _ldMeta: { enabled: true, variationKey: 'on', version: 1 }, + }, + } as any); + + const config = await aiClient.config( + 'test-config', + { kind: 'user', key: 'example-user-key', name: 'Sandy' }, + {} as any, + ); + + expect(config.enabled).toBe(true); + expect(config.model?.name).toBe('@cf/meta/llama-3.3-70b-instruct-fp8-fast'); + expect(config.messages).toHaveLength(1); + }); + + it('uses default value when LaunchDarkly returns it', async () => { + const defaultValue = { + model: { name: 'default-model' }, + }; + + mockLDClient.variationDetail.mockResolvedValue({ value: defaultValue } as any); + + const config = await aiClient.config( + 'test-config', + { kind: 'user', key: 'example-user-key', name: 'Sandy' }, + defaultValue, + ); + + expect(config.enabled).toBe(false); + expect(config.model?.name).toBe('default-model'); + }); + + it('interpolates variables in messages', async () => { + mockLDClient.variationDetail.mockResolvedValue({ + value: { + model: { name: '@cf/meta/llama-3.3-70b-instruct-fp8-fast' }, + messages: [{ role: 'user', content: 'Hello {{username}}!' 
}], + // eslint-disable-next-line no-underscore-dangle + _ldMeta: { enabled: true, variationKey: 'on', version: 1 }, + }, + } as any); + + const config = await aiClient.config( + 'test-config', + { kind: 'user', key: 'example-user-key', name: 'Sandy' }, + {}, + { myVariable: 'My User Defined Variable' } as any, + ); + + expect(config.messages?.[0].content).toBe('Hello Sandy!'); + }); + + it('tracks config usage', async () => { + mockLDClient.variationDetail.mockResolvedValue({ + value: { + // eslint-disable-next-line no-underscore-dangle + _ldMeta: { enabled: true, variationKey: 'on', version: 1 }, + }, + } as any); + + await aiClient.config( + 'test-config', + { kind: 'user', key: 'example-user-key', name: 'Sandy' }, + {} as any, + ); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation', + { kind: 'user', key: 'example-user-key' }, + 'test-config', + 1, + ); + }); + + it('provides toWorkersAI conversion method', async () => { + mockLDClient.variationDetail.mockResolvedValue({ + value: { + model: { name: '@cf/meta/llama-3.3-70b-instruct-fp8-fast' }, + messages: [{ role: 'user', content: 'Hello' }], + // eslint-disable-next-line no-underscore-dangle + _ldMeta: { enabled: true, variationKey: 'on', version: 1 }, + }, + } as any); + + const config = await aiClient.config( + 'test-config', + { kind: 'user', key: 'example-user-key', name: 'Sandy' }, + {} as any, + ); + + const wc = (config as any).toWorkersAI({} as any); + + expect(wc.model).toBe('@cf/meta/llama-3.3-70b-instruct-fp8-fast'); + expect(wc.messages).toHaveLength(1); + }); + + it('includes provider information', async () => { + mockLDClient.variationDetail.mockResolvedValue({ + value: { + model: { name: '@cf/meta/llama-3.3-70b-instruct-fp8-fast' }, + provider: { name: 'cloudflare-workers-ai' }, + // eslint-disable-next-line no-underscore-dangle + _ldMeta: { enabled: true, variationKey: 'on', version: 1 }, + }, + } as any); + + const config = await aiClient.config( + 'test-config', + { 
kind: 'user', key: 'example-user-key', name: 'Sandy' }, + {} as any, + ); + + expect(config.provider?.name).toBe('cloudflare-workers-ai'); + }); + + it('creates tracker with correct metadata', async () => { + mockLDClient.variationDetail.mockResolvedValue({ + value: { + model: { name: '@cf/meta/llama-3.3-70b-instruct-fp8-fast' }, + provider: { name: 'cloudflare' }, + // eslint-disable-next-line no-underscore-dangle + _ldMeta: { enabled: true, variationKey: 'variation-1', version: 2 }, + }, + } as any); + + const config = await aiClient.config( + 'test-config', + { kind: 'user', key: 'example-user-key', name: 'Sandy' }, + { enabled: false } as any, + ); + + expect(config.tracker).toBeDefined(); + + config.tracker.trackSuccess(); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation:success', + { kind: 'user', key: 'example-user-key', name: 'Sandy' }, + expect.objectContaining({ + aiConfigKey: 'test-config', + variationKey: 'variation-1', + version: 2, + model: '@cf/meta/llama-3.3-70b-instruct-fp8-fast', + provider: 'cloudflare', + }), + 1, + ); + }); + }); +}); diff --git a/packages/sdk/cloudflare-ai/__tests__/LDAIConfigTrackerImpl.test.ts b/packages/sdk/cloudflare-ai/__tests__/LDAIConfigTrackerImpl.test.ts new file mode 100644 index 0000000000..984e4a94cb --- /dev/null +++ b/packages/sdk/cloudflare-ai/__tests__/LDAIConfigTrackerImpl.test.ts @@ -0,0 +1,268 @@ +import { LDAIConfigTrackerImpl } from '../src/LDAIConfigTrackerImpl'; +import type { LDClientMin } from '../src/LDClientMin'; + +describe('LDAIConfigTrackerImpl metrics', () => { + let mockLDClient: jest.Mocked; + let tracker: LDAIConfigTrackerImpl; + const context = { kind: 'user', key: 'example-user-key', name: 'Sandy' } as any; + + beforeEach(() => { + mockLDClient = { + variation: jest.fn(), + variationDetail: jest.fn(), + track: jest.fn(), + }; + + tracker = new LDAIConfigTrackerImpl( + mockLDClient, + 'ai-config-key', + 'variation-key', + 3, + '@cf/test-model', + 
'cloudflare-workers-ai', + context, + ); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + it('tracks success metric with metadata', () => { + tracker.trackSuccess(); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation:success', + context, + expect.objectContaining({ + aiConfigKey: 'ai-config-key', + variationKey: 'variation-key', + version: 3, + model: '@cf/test-model', + provider: 'cloudflare-workers-ai', + }), + 1, + ); + }); + + it('tracks error metric with metadata', () => { + tracker.trackError(); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation:error', + context, + expect.any(Object), + 1, + ); + }); + + it('tracks duration metric', () => { + tracker.trackDuration(1250); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:duration:total', + context, + expect.objectContaining({ + aiConfigKey: 'ai-config-key', + variationKey: 'variation-key', + }), + 1250, + ); + }); + + it('tracks token metrics', () => { + tracker.trackTokens({ input: 5, output: 7, total: 12 }); + + expect(mockLDClient.track).toHaveBeenNthCalledWith( + 1, + '$ld:ai:tokens:total', + context, + expect.any(Object), + 12, + ); + expect(mockLDClient.track).toHaveBeenNthCalledWith( + 2, + '$ld:ai:tokens:input', + context, + expect.any(Object), + 5, + ); + expect(mockLDClient.track).toHaveBeenNthCalledWith( + 3, + '$ld:ai:tokens:output', + context, + expect.any(Object), + 7, + ); + }); + + it('tracks time to first token metric', () => { + tracker.trackTimeToFirstToken(321); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:ttf', + context, + expect.objectContaining({ + model: '@cf/test-model', + }), + 321, + ); + }); + + it('tracks aggregated metrics via trackMetrics helper', () => { + tracker.trackMetrics({ + durationMs: 640, + usage: { input: 2, output: 3, total: 5 }, + success: true, + }); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:duration:total', + context, + expect.any(Object), + 
640, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation:success', + context, + expect.any(Object), + 1, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:total', + context, + expect.any(Object), + 5, + ); + }); + + it('tracks metrics from Workers AI promise responses', async () => { + const dateSpy = jest.spyOn(Date, 'now'); + dateSpy.mockReturnValueOnce(1).mockReturnValueOnce(101); + + const result = await tracker.trackWorkersAIMetrics(async () => ({ + usage: { + prompt_tokens: 4, + completion_tokens: 6, + total_tokens: 10, + }, + })); + + expect(result).toEqual({ + usage: { + prompt_tokens: 4, + completion_tokens: 6, + total_tokens: 10, + }, + }); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:duration:total', + context, + expect.any(Object), + 100, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation:success', + context, + expect.any(Object), + 1, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:input', + context, + expect.any(Object), + 4, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:output', + context, + expect.any(Object), + 6, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:total', + context, + expect.any(Object), + 10, + ); + }); + + it('tracks errors from Workers AI promise responses', async () => { + const dateSpy = jest.spyOn(Date, 'now'); + dateSpy.mockReturnValueOnce(10).mockReturnValueOnce(30); + + await expect( + tracker.trackWorkersAIMetrics(async () => { + throw new Error('boom'); + }), + ).rejects.toThrow('boom'); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:duration:total', + context, + expect.any(Object), + 20, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation:error', + context, + expect.any(Object), + 1, + ); + }); + + it('tracks metrics from Workers AI streaming responses', async () => { + const dateSpy = jest.spyOn(Date, 'now'); + 
dateSpy.mockReturnValueOnce(5).mockReturnValueOnce(155); + + const usagePromise = Promise.resolve({ + input_tokens: 8, + output_tokens: 9, + }); + const finishReason = Promise.resolve('stop'); + + tracker.trackWorkersAIStreamMetrics( + () => + ({ + usage: usagePromise, + finishReason, + }) as any, + ); + + await finishReason; + await usagePromise; + await Promise.resolve(); + + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:duration:total', + context, + expect.any(Object), + 150, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:generation:success', + context, + expect.any(Object), + 1, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:input', + context, + expect.any(Object), + 8, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:output', + context, + expect.any(Object), + 9, + ); + expect(mockLDClient.track).toHaveBeenCalledWith( + '$ld:ai:tokens:total', + context, + expect.any(Object), + 17, + ); + }); +}); diff --git a/packages/sdk/cloudflare-ai/example/.gitignore b/packages/sdk/cloudflare-ai/example/.gitignore new file mode 100644 index 0000000000..b99bbc2114 --- /dev/null +++ b/packages/sdk/cloudflare-ai/example/.gitignore @@ -0,0 +1,5 @@ +node_modules +dist +.wrangler +.dev.vars + diff --git a/packages/sdk/cloudflare-ai/example/README.md b/packages/sdk/cloudflare-ai/example/README.md new file mode 100644 index 0000000000..c0916a43bc --- /dev/null +++ b/packages/sdk/cloudflare-ai/example/README.md @@ -0,0 +1,170 @@ +# Cloudflare Workers AI + LaunchDarkly: Random Joke Example + +This example shows how to use LaunchDarkly AI Configs with Cloudflare Workers AI to generate a random joke when you curl the endpoint with a user ID. + +## Prerequisites + +1. Node.js 16 or higher +2. Yarn package manager +3. Cloudflare account with Workers AI enabled +4. Wrangler CLI (`yarn add -D wrangler@latest`) +5. LaunchDarkly account +6. 
**LaunchDarkly Cloudflare KV integration enabled** (see setup below) + +**This SDK requires the [LaunchDarkly Cloudflare KV integration](https://docs.launchdarkly.com/integrations/cloudflare)** to automatically sync your feature flags to Cloudflare KV. + +### Enable the Integration: + +1. Go to [LaunchDarkly → Settings → Integrations](https://app.launchdarkly.com/settings/integrations) +2. Find **Cloudflare KV** and click **Add Integration** +3. Connect your Cloudflare account +4. Select your KV namespace (create one if needed) +5. Choose the LaunchDarkly environment to sync +6. Save the integration + +Once enabled, LaunchDarkly will automatically push your flags to Cloudflare KV whenever they change. The SDK reads from this KV storage for ultra-fast edge evaluation. + + +## Setup + +### 1. Build the SDKs + +From the root of js-core: + +```bash +yarn && yarn build +``` + +### 2. Create KV Namespace + +Create a KV namespace for LaunchDarkly data: + +```bash +# Login to Cloudflare +wrangler login + +# Create KV namespace (production) +wrangler kv:namespace create "LD_KV" +# Note the ID returned + +# Create KV namespace (preview) +wrangler kv:namespace create "LD_KV" --preview +# Note the preview ID returned +``` + +### 3. Configure wrangler.toml + +Edit `wrangler.toml` and replace: + +- `YOUR_KV_ID` with your production KV namespace ID +- `YOUR_PREVIEW_KV_ID` with your preview KV namespace ID +- `your-client-side-id` with your LaunchDarkly client-side ID + +```toml +compatibility_flags = ["nodejs_compat"] + +kv_namespaces = [{ binding = "LD_KV", id = "YOUR_KV_ID", preview_id = "YOUR_PREVIEW_KV_ID" }] + +[vars] +LD_CLIENT_ID = "LD_CLIENT_ID" + +[ai] +binding = "AI" +``` + +### 4. Create LaunchDarkly AI Config (Random Joke) + +Create an AI Config that the worker will use to generate a random joke. 
+ +1) In LaunchDarkly, go to **AI Configs → Create AI Config** +- **Key**: `random-joke` (required — the worker uses this key) +- Click **Create AI Config** + +2) Choose a Cloudflare model +- You can use any model from Cloudflare's list. If it's not pre-listed in LaunchDarkly, add a custom model: + - Go to **Project settings → AI model configs → Add custom model** + - **Model ID**: paste a Workers AI model ID (e.g. `@cf/meta/llama-3.1-8b-instruct-fast`) + - **Provider**: `Cloudflare Workers AI` (or Custom) + - Save + +3) Set parameters (optional but recommended) +- `temperature`: `0.7` to `0.9` +- `max_tokens`: `120` to `200` + +4) Add message (your prompt) +- User: `Tell a random joke.` + +5) Targeting +- Enable targeting for your environment and serve the variation to all users. + +Refer to: [Cloudflare AI Models](https://developers.cloudflare.com/workers-ai/models/) for the full model list, and [LD AI Config docs](https://launchdarkly.com/docs/home/ai-configs/create) for configuration details. + + + +### 5. Run the Example + +```bash +cd packages/sdk/cloudflare-ai/example +yarn start +``` + +## Testing + +Call the worker with a `userId`: + +```bash +curl "http://localhost:8787?userId=user-123" +``` + +Expected response: + +```json +{ + "success": true, + "userId": "user-123", + "model": "@cf/meta/llama-3.1-8b-instruct-fast", + "provider": "cloudflare-workers-ai", + "joke": "Why do programmers prefer dark mode? Because light attracts bugs!", + "enabled": true +} +``` + +## How It Works + +1. Initialize clients: LaunchDarkly + AI client +2. Get AI Config: retrieves `random-joke` from LaunchDarkly +3. Call model: `wc = config.toWorkersAI(env.AI)` then `await config.tracker.trackWorkersAIMetrics(() => env.AI.run(wc.model, wc))` +4. 
Flush events: `ctx.waitUntil(ldClient.flush().finally(() => ldClient.close()))` + +## LaunchDarkly Features + +### Variations and Rollouts + +- Create multiple variations to compare models, temperatures, and styles +- Target specific users or segments; do percentage rollouts +- All changes sync automatically to Cloudflare KV + +### Metrics and AI Config Analytics + +- The SDK records `$ld:ai:generation`, `$ld:ai:tokens`, `$ld:ai:duration`, and `$ld:ai:ttft` events +- Events include `aiConfigKey`, `variationKey`, `version`, `model`, and `provider` +- This links Live events to your AI Config analytics in LaunchDarkly + +## Deployment + +```bash +yarn deploy +``` + + +## Model Options + +Pick any Workers AI model ID: + +- `@cf/meta/llama-3.1-8b-instruct-fast` — fast, good quality +- `@cf/meta/llama-3.3-70b-instruct-fp8-fast` — higher quality +- `@cf/mistralai/mistral-7b-instruct-v0.1` — alternative +- `@cf/qwen/qwq-32b` — advanced reasoning + +See: [Cloudflare AI Models](https://developers.cloudflare.com/workers-ai/models/) + diff --git a/packages/sdk/cloudflare-ai/example/package.json b/packages/sdk/cloudflare-ai/example/package.json new file mode 100644 index 0000000000..f4099fc98a --- /dev/null +++ b/packages/sdk/cloudflare-ai/example/package.json @@ -0,0 +1,20 @@ +{ + "name": "cloudflare-ai-example", + "version": "1.0.0", + "private": true, + "type": "module", + "scripts": { + "start": "wrangler dev", + "deploy": "wrangler deploy", + "build": "tsc" + }, + "dependencies": { + "@launchdarkly/cloudflare-server-sdk": "workspace:*", + "@launchdarkly/cloudflare-server-sdk-ai": "workspace:*" + }, + "devDependencies": { + "@cloudflare/workers-types": "^4.20230321.0", + "typescript": "^5.1.6", + "wrangler": "^4.0.0" + } +} diff --git a/packages/sdk/cloudflare-ai/example/src/index.ts b/packages/sdk/cloudflare-ai/example/src/index.ts new file mode 100644 index 0000000000..75aa39da98 --- /dev/null +++ b/packages/sdk/cloudflare-ai/example/src/index.ts @@ -0,0 +1,83 @@ +import 
{ init } from '@launchdarkly/cloudflare-server-sdk'; +import { initAi } from '@launchdarkly/cloudflare-server-sdk-ai'; + +interface Env { + LD_CLIENT_ID: string; + LD_KV: KVNamespace; + AI: Ai; +} + +export default { + async fetch(request: Request, env: Env, ctx: ExecutionContext): Promise { + try { + const url = new URL(request.url); + const userId = url.searchParams.get('userId') || 'anonymous-user'; + + const ldClient = init(env.LD_CLIENT_ID, env.LD_KV, { sendEvents: true }); + await ldClient.waitForInitialization(); + + // Pass KV namespace and client ID so AI SDK can read AI Configs directly + const aiClient = initAi(ldClient, { clientSideID: env.LD_CLIENT_ID, kvNamespace: env.LD_KV }); + + const context = { + kind: 'user', + key: userId, + }; + + const config = await aiClient.config( + 'random-joke', + context, + { + enabled: false, + }, + ); + + if (!config.enabled) { + return new Response( + JSON.stringify({ + success: false, + message: 'AI feature is not enabled for this user', + userId, + }), + { + status: 503, + headers: { 'Content-Type': 'application/json' }, + }, + ); + } + + const wc = config.toWorkersAI(env.AI); + // Workers AI bindings have many possible outputs; cast to a minimal type that includes optional usage. 
+ type WorkersAIResultWithUsage = { usage?: { prompt_tokens?: number; completion_tokens?: number; total_tokens?: number; input_tokens?: number; output_tokens?: number } } | unknown; + const response = (await config.tracker.trackWorkersAIMetrics(() => env.AI.run(wc.model as any, wc as any))) as WorkersAIResultWithUsage; + + // Ensure events are flushed after the response is returned + ctx.waitUntil(ldClient.flush().finally(() => ldClient.close())); + + return new Response( + JSON.stringify({ + success: true, + userId, + model: config.model?.name, + provider: config.provider?.name || 'cloudflare-workers-ai', + joke: (response as any)?.response || (response as any), + enabled: config.enabled, + }), + { + headers: { 'Content-Type': 'application/json' }, + }, + ); + } catch (error) { + return new Response( + JSON.stringify({ + success: false, + error: error instanceof Error ? error.message : 'Unknown error', + }), + { + status: 500, + headers: { 'Content-Type': 'application/json' }, + }, + ); + } + }, +}; diff --git a/packages/sdk/cloudflare-ai/example/tsconfig.json b/packages/sdk/cloudflare-ai/example/tsconfig.json new file mode 100644 index 0000000000..330d89fafb --- /dev/null +++ b/packages/sdk/cloudflare-ai/example/tsconfig.json @@ -0,0 +1,17 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "ES2020", + "lib": ["ES2020"], + "outDir": "./dist", + "rootDir": "./src", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "moduleResolution": "node", + "resolveJsonModule": true, + "types": ["@cloudflare/workers-types"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/sdk/cloudflare-ai/example/wrangler.toml b/packages/sdk/cloudflare-ai/example/wrangler.toml new file mode 100644 index 0000000000..67e3b5239b --- /dev/null +++ b/packages/sdk/cloudflare-ai/example/wrangler.toml @@ -0,0 +1,22 @@ +name = "cloudflare-ai-example" +main = "dist/index.js" +compatibility_date = "2024-01-01" 
+compatibility_flags = ["nodejs_compat"] + +# KV namespaces for LaunchDarkly data +# Make sure this ID matches the namespace in your LaunchDarkly Cloudflare KV integration +kv_namespaces = [{ binding = "LD_KV", id = "YOUR_KV_ID", preview_id = "YOUR_PREVIEW_KV_ID" }] + +# Replace with your LaunchDarkly client-side ID +[vars] +LD_CLIENT_ID = "LD_CLIENT_ID" + +# Cloudflare AI binding +[ai] +binding = "AI" + +[build] +command = "npx tsc" + +[observability] +enabled = true diff --git a/packages/sdk/cloudflare-ai/jest.config.cjs b/packages/sdk/cloudflare-ai/jest.config.cjs new file mode 100644 index 0000000000..220c987fbe --- /dev/null +++ b/packages/sdk/cloudflare-ai/jest.config.cjs @@ -0,0 +1,19 @@ +/** @type {import('jest').Config} */ +module.exports = { + preset: 'ts-jest', + testEnvironment: 'node', + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json'], + collectCoverageFrom: ['src/**/*.ts'], + testMatch: ['**/__tests__/**/*.test.ts'], + transform: { + '^.+\\.tsx?$': [ + 'ts-jest', + { + tsconfig: { + esModuleInterop: true, + }, + }, + ], + }, +}; + diff --git a/packages/sdk/cloudflare-ai/package.json b/packages/sdk/cloudflare-ai/package.json new file mode 100644 index 0000000000..7f59869975 --- /dev/null +++ b/packages/sdk/cloudflare-ai/package.json @@ -0,0 +1,79 @@ +{ + "name": "@launchdarkly/cloudflare-server-sdk-ai", + "version": "0.1.0", + "description": "LaunchDarkly AI SDK for Cloudflare Workers", + "homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/sdk/cloudflare-ai", + "repository": { + "type": "git", + "url": "https://github.com/launchdarkly/js-core.git" + }, + "type": "module", + "exports": { + "types": "./dist/index.d.ts", + "import": "./dist/esm/index.js", + "require": "./dist/cjs/index.js" + }, + "main": "./dist/cjs/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "clean": "rimraf dist", + "rb": "rollup -c --configPlugin typescript", + "rbw": "yarn rb --watch", + "build": "yarn clean && 
yarn rb", + "lint": "npx eslint . --ext .ts", + "prettier": "prettier --write '**/*.@(js|ts|tsx|json|css)' --ignore-path ../../../.prettierignore", + "lint:fix": "yarn run lint --fix", + "check": "yarn prettier && yarn lint && yarn build && yarn test", + "test": "jest" + }, + "keywords": [ + "launchdarkly", + "cloudflare", + "workers", + "ai", + "llm", + "workers-ai" + ], + "author": "LaunchDarkly", + "license": "Apache-2.0", + "dependencies": { + "mustache": "^4.2.0" + }, + "peerDependencies": { + "@launchdarkly/cloudflare-server-sdk": "2.x" + }, + "devDependencies": { + "@cloudflare/workers-types": "^4.20230321.0", + "@launchdarkly/cloudflare-server-sdk": "2.7.10", + "@rollup/plugin-commonjs": "^25.0.4", + "@rollup/plugin-json": "^6.0.0", + "@rollup/plugin-node-resolve": "^15.2.1", + "@rollup/plugin-terser": "^0.4.3", + "@rollup/plugin-typescript": "^11.1.3", + "@trivago/prettier-plugin-sort-imports": "^4.1.1", + "@types/jest": "^29.5.3", + "@types/mustache": "^4.2.5", + "@typescript-eslint/eslint-plugin": "^6.20.0", + "@typescript-eslint/parser": "^6.20.0", + "eslint": "^8.45.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-airbnb-typescript": "^17.1.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-import": "^2.27.5", + "eslint-plugin-jest": "^27.6.3", + "eslint-plugin-prettier": "^5.0.0", + "jest": "^29.6.1", + "prettier": "^3.0.0", + "rimraf": "^5.0.1", + "rollup": "^3.29.2", + "rollup-plugin-dts": "^6.0.2", + "rollup-plugin-esbuild": "^5.0.0", + "rollup-plugin-filesize": "^10.0.0", + "ts-jest": "^29.1.1", + "typescript": "5.1.6", + "wrangler": "latest" + } +} diff --git a/packages/sdk/cloudflare-ai/rollup.config.ts b/packages/sdk/cloudflare-ai/rollup.config.ts new file mode 100644 index 0000000000..dd195586b5 --- /dev/null +++ b/packages/sdk/cloudflare-ai/rollup.config.ts @@ -0,0 +1,53 @@ +import commonjs from '@rollup/plugin-commonjs'; +import json from '@rollup/plugin-json'; +import resolve from '@rollup/plugin-node-resolve'; +import 
terser from '@rollup/plugin-terser'; +import dts from 'rollup-plugin-dts'; +import esbuild from 'rollup-plugin-esbuild'; +import filesize from 'rollup-plugin-filesize'; + +const inputPath = 'src/index.ts'; +const cjsPath = 'dist/cjs/index.js'; +const esmPath = 'dist/esm/index.js'; +const typingsPath = 'dist/index.d.ts'; + +const plugins = [resolve(), commonjs(), esbuild(), json(), terser(), filesize()]; + +// the second array item is a function to include all js-core packages in the bundle so they +// are not imported or required as separate npm packages +const external = [/node_modules/, (id) => !id.includes('js-core')]; + +export default [ + { + input: inputPath, + output: [ + { + file: cjsPath, + format: 'cjs', + sourcemap: true, + }, + ], + plugins, + external, + }, + { + input: inputPath, + output: [ + { + file: esmPath, + format: 'esm', + sourcemap: true, + }, + ], + plugins, + external, + }, + { + input: inputPath, + plugins: [dts(), json()], + output: { + file: typingsPath, + format: 'esm', + }, + }, +]; diff --git a/packages/sdk/cloudflare-ai/src/ClientKVMeta.ts b/packages/sdk/cloudflare-ai/src/ClientKVMeta.ts new file mode 100644 index 0000000000..03c3acac8b --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/ClientKVMeta.ts @@ -0,0 +1,19 @@ +import type { KVNamespace } from '@cloudflare/workers-types'; + +import type { LDClientMin } from './LDClientMin'; + +type KvMeta = { clientSideID: string; kvNamespace: KVNamespace }; + +const clientToKvMeta = new WeakMap(); + +export function setClientKVMeta( + ldClient: LDClientMin, + clientSideID: string, + kvNamespace: KVNamespace, +): void { + clientToKvMeta.set(ldClient, { clientSideID, kvNamespace }); +} + +export function getClientKVMeta(ldClient: LDClientMin): KvMeta | null { + return clientToKvMeta.get(ldClient) ?? 
null; +} diff --git a/packages/sdk/cloudflare-ai/src/LDAIClientImpl.ts b/packages/sdk/cloudflare-ai/src/LDAIClientImpl.ts new file mode 100644 index 0000000000..e2699b9a6b --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/LDAIClientImpl.ts @@ -0,0 +1,336 @@ +import mustache from 'mustache'; + +import { LDContext } from '@launchdarkly/cloudflare-server-sdk'; + +import type { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './api/agents/LDAIAgent'; +import type { + LDAIConfig, + LDAIDefaults, + LDMessage, + LDModelConfig, + LDProviderConfig, + WorkersAIConfig, + WorkersAIMapOptions, +} from './api/config/LDAIConfig'; +import type { LDAIConfigTracker } from './api/config/LDAIConfigTracker'; +import type { LDAIClient } from './api/LDAIClient'; +import { getClientKVMeta } from './ClientKVMeta'; +import { LDAIConfigTrackerImpl } from './LDAIConfigTrackerImpl'; +import type { LDClientMin } from './LDClientMin'; + +/** + * Metadata from LaunchDarkly variation. + */ +interface LDMeta { + variationKey: string; + enabled: boolean; + version?: number; +} + +/** + * Internal variation content from LaunchDarkly. + */ +interface VariationContent { + model?: LDModelConfig; + messages?: LDMessage[]; + provider?: LDProviderConfig; + // eslint-disable-next-line no-underscore-dangle + _ldMeta?: LDMeta; +} + +/** + * Result of evaluating a configuration. + */ +interface EvaluationResult { + tracker: LDAIConfigTracker; + enabled: boolean; + model?: LDModelConfig; + provider?: LDProviderConfig; + messages?: LDMessage[]; +} + +/** + * Implementation of the AI client for Cloudflare Workers. 
+ */ +export class LDAIClientImpl implements LDAIClient { + constructor(private readonly _ldClient: LDClientMin) {} + + private _interpolateTemplate(template: string, variables: Record): string { + return mustache.render(template, variables, undefined, { escape: (item: any) => item }); + } + + private _mapWorkersAIParameters(params: Record): Record { + const mapped: Record = {}; + // eslint-disable-next-line no-restricted-syntax + for (const [key, value] of Object.entries(params)) { + const k = key.toLowerCase(); + if (k === 'maxtokens' || k === 'max_tokens') mapped.max_tokens = value; + else if (k === 'topp' || k === 'top_p') mapped.top_p = value; + else if (k === 'topk' || k === 'top_k') mapped.top_k = value; + else if (k === 'frequencypenalty' || k === 'frequency_penalty') + mapped.frequency_penalty = value; + else if (k === 'presencepenalty' || k === 'presence_penalty') mapped.presence_penalty = value; + else if (k === 'temperature') mapped.temperature = value; + else mapped[key] = value; + } + return mapped; + } + + private _toWorkersAI( + model: LDModelConfig | undefined, + messages: LDMessage[] | undefined, + options?: WorkersAIMapOptions, + ): WorkersAIConfig { + const out: WorkersAIConfig = { model: options?.modelOverride || model?.name || '' }; + if (messages && messages.length > 0) { + out.messages = messages.map((m) => ({ role: m.role, content: m.content })); + } + if (model?.parameters) Object.assign(out, this._mapWorkersAIParameters(model.parameters)); + if (options?.stream !== undefined) out.stream = options.stream; + if (options?.additionalParams) Object.assign(out, options.additionalParams); + return out; + } + + /** + * Reads AI Config directly from KV, bypassing standard flag evaluation. + * AI Configs are pre-evaluated by LaunchDarkly's backend and stored under + * the environment payload in Cloudflare KV. 
When available, prefer this + * path so the selected variation (including model/provider/messages and + * _ldMeta) is used exactly as defined in LaunchDarkly. + */ + private async _readAIConfigFromKV(key: string): Promise { + const kvMeta = getClientKVMeta(this._ldClient); + if (!kvMeta) { + return null; + } + + try { + const kvKey = `LD-Env-${kvMeta.clientSideID}`; + const data = await kvMeta.kvNamespace.get(kvKey, 'json'); + if (!data || typeof data !== 'object') { + return null; + } + + const { flags } = data as any; + if (!flags || typeof flags !== 'object') { + return null; + } + + const aiConfig = flags[key]; + if (!aiConfig) { + return null; + } + + // AI Configs are pre-evaluated and placed under `value` by the backend. + if (aiConfig.value && typeof aiConfig.value === 'object') { + return aiConfig.value as VariationContent; + } + + return null; + } catch (_err) { + return null; + } + } + + private async _evaluate( + key: string, + context: LDContext, + defaultValue: LDAIDefaults, + ): Promise { + // Read the pre-evaluated AI Config from KV. + const kvValue = await this._readAIConfigFromKV(key); + + let value: VariationContent; + if (kvValue) { + value = kvValue; + } else { + // Fallback to Cloudflare server SDK evaluation (reads from KV for flags) + const detail: any = await this._ldClient.variationDetail(key, context, defaultValue); + if (detail && detail.value && typeof detail.value === 'object' && 'value' in detail.value) { + value = detail.value.value; + } else if (detail && detail.value) { + value = detail.value; + } else { + // Last resort: use default from code + value = defaultValue as VariationContent; + } + } + + const tracker = new LDAIConfigTrackerImpl( + this._ldClient, + key, + // eslint-disable-next-line no-underscore-dangle + value._ldMeta?.variationKey ?? '', + // eslint-disable-next-line no-underscore-dangle + value._ldMeta?.version ?? 1, + value.model?.name ?? '', + value.provider?.name ?? 
'cloudflare-workers-ai', + context, + ); + + // eslint-disable-next-line no-underscore-dangle + const enabled = !!value._ldMeta?.enabled; + + return { + tracker, + enabled, + model: value.model, + provider: value.provider, + messages: value.messages, + }; + } + + async config( + key: string, + context: LDContext, + defaultValue: LDAIDefaults, + variables?: Record, + ): Promise { + this._ldClient.track('$ld:ai:generation', this._sanitizeContext(context), key, 1); + + const { tracker, enabled, model, provider, messages } = await this._evaluate( + key, + context, + defaultValue, + ); + + const config: Omit = { + tracker, + enabled, + }; + + if (model) { + config.model = { ...model }; + } + + if (provider) { + config.provider = { ...provider }; + } + + const allVariables = this._createTemplateVariables(context, variables); + + if (messages) { + config.messages = messages.map((entry: LDMessage) => ({ + ...entry, + content: this._interpolateTemplate(entry.content, allVariables), + })); + } + + return { + ...config, + toWorkersAI: (_binding, options?: WorkersAIMapOptions): WorkersAIConfig => + this._toWorkersAI(config.model, config.messages, options), + }; + } + + async agent( + key: string, + context: LDContext, + defaultValue: LDAIAgentDefaults, + variables?: Record, + ): Promise { + this._ldClient.track('$ld:ai:agent:function:single', this._sanitizeContext(context), key, 1); + + const { tracker, enabled, model, provider, messages } = await this._evaluate( + key, + context, + defaultValue as any, + ); + + const allVariables = this._createTemplateVariables(context, variables); + const instructionsRaw = (defaultValue?.instructions ?? '') as string; + const instructions = instructionsRaw + ? 
this._interpolateTemplate(instructionsRaw, allVariables) + : undefined; + + const agent: Omit = { + tracker, + enabled, + model, + provider, + instructions, + }; + + return { + ...agent, + toWorkersAI: (_binding, options?: WorkersAIMapOptions): WorkersAIConfig => + this._toWorkersAI(agent.model, messages, options), + } as LDAIAgent; + } + + async agents( + agentConfigs: TConfigs, + context: LDContext, + ): Promise> { + const results = await Promise.all( + agentConfigs.map((cfg) => this.agent(cfg.key, context, cfg.defaultValue, cfg.variables)), + ); + const map = {} as Record; + agentConfigs.forEach((cfg, idx) => { + map[cfg.key] = results[idx]; + }); + return map as Record; + } + + private _sanitizeContext(context: LDContext): LDContext { + if (!context || typeof context !== 'object') { + return context; + } + + const ctx = context as Record; + const kind = typeof ctx.kind === 'string' ? (ctx.kind as string) : undefined; + + if (kind && kind !== 'multi') { + const sanitized: Record = { kind }; + if (typeof ctx.key === 'string') { + sanitized.key = ctx.key; + } + return sanitized as LDContext; + } + + if (kind === 'multi') { + const sanitized: Record = { kind: 'multi' }; + Object.keys(ctx).forEach((k) => { + if (k === 'kind') { + return; + } + const value = ctx[k]; + if (!value || typeof value !== 'object') { + return; + } + const sub = value as Record; + const subKind = typeof sub.kind === 'string' ? 
(sub.kind as string) : undefined; + if (!subKind) { + return; + } + const sanitizedSub: Record = { kind: subKind }; + if (typeof sub.key === 'string') { + sanitizedSub.key = sub.key; + } + sanitized[k] = sanitizedSub; + }); + return sanitized as LDContext; + } + + return context; + } + + private _createTemplateVariables( + context: LDContext, + variables?: Record, + ): Record { + const contextVars: Record = {}; + if (context && typeof context === 'object') { + Object.assign(contextVars, context as Record); + if ('name' in context) { + contextVars.username = (context as any).name; + } + } + + return { + ...contextVars, + ...variables, + ldctx: context, + }; + } +} diff --git a/packages/sdk/cloudflare-ai/src/LDAIConfigTrackerImpl.ts b/packages/sdk/cloudflare-ai/src/LDAIConfigTrackerImpl.ts new file mode 100644 index 0000000000..8cad408d95 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/LDAIConfigTrackerImpl.ts @@ -0,0 +1,193 @@ +import { LDContext } from '@launchdarkly/cloudflare-server-sdk'; + +import type { LDAIConfigTracker, LDAIMetricSummary } from './api/config/LDAIConfigTracker'; +import type { LDFeedbackKind, LDTokenUsage } from './api/metrics'; +import type { LDClientMin } from './LDClientMin'; + +/** + * Implementation of AI configuration tracker for metrics and analytics. 
+ */ +export class LDAIConfigTrackerImpl implements LDAIConfigTracker { + private _tracked: LDAIMetricSummary = {}; + constructor( + private readonly _ldClient: LDClientMin, + private readonly _configKey: string, + private readonly _variationKey: string, + private readonly _version: number, + private readonly _modelName: string, + private readonly _providerName: string, + private readonly _context: LDContext, + ) {} + + trackSuccess(): void { + this._ldClient.track('$ld:ai:generation:success', this._context, this._createBaseMetadata(), 1); + this._tracked.success = true; + } + + trackError(): void { + this._ldClient.track('$ld:ai:generation:error', this._context, this._createBaseMetadata(), 1); + this._tracked.success = false; + } + + trackDuration(durationMs: number): void { + this._ldClient.track( + '$ld:ai:duration:total', + this._context, + this._createBaseMetadata(), + durationMs, + ); + this._tracked.durationMs = durationMs; + } + + trackMetrics(metrics: { durationMs: number; usage?: LDTokenUsage; success: boolean }): void { + this._tracked.durationMs = metrics.durationMs; + this.trackDuration(metrics.durationMs); + if (metrics.success) { + this.trackSuccess(); + } else { + this.trackError(); + } + if (metrics.usage) { + this.trackTokens(metrics.usage); + } + } + + trackTokens(usage: LDTokenUsage): void { + const metadata = this._createBaseMetadata(); + if (usage.total > 0) { + this._ldClient.track('$ld:ai:tokens:total', this._context, metadata, usage.total); + } + if (usage.input > 0) { + this._ldClient.track('$ld:ai:tokens:input', this._context, metadata, usage.input); + } + if (usage.output > 0) { + this._ldClient.track('$ld:ai:tokens:output', this._context, metadata, usage.output); + } + this._tracked.tokens = usage; + } + + trackFeedback(kind: LDFeedbackKind): void { + if (kind === 'positive') { + this._ldClient.track( + '$ld:ai:feedback:user:positive', + this._context, + this._createBaseMetadata(), + 1, + ); + } else if (kind === 'negative') { + 
this._ldClient.track( + '$ld:ai:feedback:user:negative', + this._context, + this._createBaseMetadata(), + 1, + ); + } + this._tracked.feedback = { kind } as any; + } + + trackTimeToFirstToken(timeToFirstTokenMs: number): void { + this._ldClient.track( + '$ld:ai:tokens:ttf', + this._context, + this._createBaseMetadata(), + timeToFirstTokenMs, + ); + this._tracked.timeToFirstTokenMs = timeToFirstTokenMs; + } + + async trackDurationOf(func: () => Promise): Promise { + const start = Date.now(); + try { + const res = await func(); + this.trackDuration(Date.now() - start); + return res; + } catch (e) { + this.trackDuration(Date.now() - start); + throw e; + } + } + + async trackWorkersAIMetrics< + T extends { + usage?: { + prompt_tokens?: number; + completion_tokens?: number; + total_tokens?: number; + input_tokens?: number; + output_tokens?: number; + }; + }, + >(func: () => Promise): Promise { + const start = Date.now(); + try { + const res = await func(); + const duration = Date.now() - start; + this.trackDuration(duration); + this.trackSuccess(); + const usage = res?.usage; + if (usage) { + const input = (usage as any).prompt_tokens ?? (usage as any).input_tokens ?? 0; + const output = (usage as any).completion_tokens ?? (usage as any).output_tokens ?? 0; + const total = (usage as any).total_tokens ?? 
input + output; + this.trackTokens({ input, output, total }); + } + return res; + } catch (e) { + const duration = Date.now() - start; + this.trackDuration(duration); + this.trackError(); + throw e; + } + } + + trackWorkersAIStreamMetrics< + T extends { + usage?: Promise<{ + prompt_tokens?: number; + completion_tokens?: number; + total_tokens?: number; + input_tokens?: number; + output_tokens?: number; + }>; + finishReason?: Promise; + }, + >(func: () => T): T { + const start = Date.now(); + const stream = func(); + // Best effort: attach handlers if promises exist + (async () => { + try { + await stream.finishReason?.catch?.(() => undefined); + const duration = Date.now() - start; + this.trackDuration(duration); + this.trackSuccess(); + const usage = await (stream.usage ?? Promise.resolve(undefined)); + if (usage) { + const input = (usage as any).prompt_tokens ?? (usage as any).input_tokens ?? 0; + const output = (usage as any).completion_tokens ?? (usage as any).output_tokens ?? 0; + const total = (usage as any).total_tokens ?? 
input + output; + this.trackTokens({ input, output, total }); + } + } catch (e) { + const duration = Date.now() - start; + this.trackDuration(duration); + this.trackError(); + } + })(); + return stream; + } + + getSummary(): LDAIMetricSummary { + return { ...this._tracked }; + } + + private _createBaseMetadata(): Record { + return { + aiConfigKey: this._configKey, + variationKey: this._variationKey, + version: this._version, + model: this._modelName, + provider: this._providerName, + }; + } +} diff --git a/packages/sdk/cloudflare-ai/src/LDClientMin.ts b/packages/sdk/cloudflare-ai/src/LDClientMin.ts new file mode 100644 index 0000000000..4577c55439 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/LDClientMin.ts @@ -0,0 +1,23 @@ +import { LDContext, LDEvaluationDetail, LDFlagValue } from '@launchdarkly/cloudflare-server-sdk'; + +/** + * Interface which represents the required interface components for the Cloudflare server SDK + * to work with the AI SDK. + */ +export interface LDClientMin { + variation( + key: string, + context: LDContext, + defaultValue: LDFlagValue, + callback?: (err: any, res: LDFlagValue) => void, + ): Promise; + + variationDetail( + key: string, + context: LDContext, + defaultValue: LDFlagValue, + callback?: (err: any, res: LDEvaluationDetail) => void, + ): Promise; + + track(key: string, context: LDContext, data?: any, metricValue?: number): void; +} diff --git a/packages/sdk/cloudflare-ai/src/api/LDAIClient.ts b/packages/sdk/cloudflare-ai/src/api/LDAIClient.ts new file mode 100644 index 0000000000..9af51e2b56 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/LDAIClient.ts @@ -0,0 +1,60 @@ +import { LDContext } from '@launchdarkly/cloudflare-server-sdk'; + +import type { LDAIAgent, LDAIAgentConfig, LDAIAgentDefaults } from './agents/LDAIAgent'; +import type { LDAIConfig, LDAIDefaults } from './config/LDAIConfig'; + +/** + * Interface for performing AI operations with LaunchDarkly. 
+ */ +export interface LDAIClient { + /** + * Retrieves and processes an AI configuration from LaunchDarkly. + * + * @param key The key of the AI configuration in LaunchDarkly. + * @param context The LaunchDarkly context for evaluation. + * @param defaultValue Fallback configuration if LaunchDarkly is unavailable. + * @param variables Variables for template interpolation in messages. + * @returns The AI configuration with tracker and conversion methods. + * + * @example + * ```typescript + * const context = { kind: 'user', key: 'example-user-key', name: 'Sandy' }; + * const config = await aiClient.config( + * 'chat-assistant', + * context, + * {}, + * { myVariable: 'My User Defined Variable' } + * ); + * + * if (config.enabled) { + * const wc = config.toWorkersAI(env.AI); + * const response = await env.AI.run(wc.model, wc); + * config.tracker.trackSuccess(); + * } + * ``` + */ + config( + key: string, + context: LDContext, + defaultValue: LDAIDefaults, + variables?: Record, + ): Promise; + + /** + * Retrieves and processes a single AI Config agent including customized instructions. + */ + agent( + key: string, + context: LDContext, + defaultValue: LDAIAgentDefaults, + variables?: Record, + ): Promise; + + /** + * Retrieves and processes multiple AI Config agents and returns a map of key to agent. 
+ */ + agents( + agentConfigs: TConfigs, + context: LDContext, + ): Promise>; +} diff --git a/packages/sdk/cloudflare-ai/src/api/agents/LDAIAgent.ts b/packages/sdk/cloudflare-ai/src/api/agents/LDAIAgent.ts new file mode 100644 index 0000000000..b065492609 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/agents/LDAIAgent.ts @@ -0,0 +1,18 @@ +import type { LDAIConfig } from '../config/LDAIConfig'; + +export interface LDAIAgent extends Omit { + instructions?: string; +} + +export interface LDAIAgentConfig { + key: string; + defaultValue: LDAIAgentDefaults; + variables?: Record; +} + +export interface LDAIAgentDefaults { + enabled: boolean; + instructions?: string; + model?: LDAIConfig['model']; + provider?: LDAIConfig['provider']; +} diff --git a/packages/sdk/cloudflare-ai/src/api/agents/index.ts b/packages/sdk/cloudflare-ai/src/api/agents/index.ts new file mode 100644 index 0000000000..f68fcd9a24 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/agents/index.ts @@ -0,0 +1 @@ +export * from './LDAIAgent'; diff --git a/packages/sdk/cloudflare-ai/src/api/config/LDAIConfig.ts b/packages/sdk/cloudflare-ai/src/api/config/LDAIConfig.ts new file mode 100644 index 0000000000..cdadf8560e --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/config/LDAIConfig.ts @@ -0,0 +1,182 @@ +import type { Ai } from '@cloudflare/workers-types'; + +import type { LDAIConfigTracker } from './LDAIConfigTracker'; + +/** + * Configuration for an AI model. + */ +export interface LDModelConfig { + /** + * The Workers AI model identifier as expected by Cloudflare, for example: + * "@cf/meta/llama-3.3-70b-instruct-fp8-fast". + */ + name: string; + + /** + * Model-specific parameters such as temperature, max_tokens, top_p, etc. + */ + parameters?: { [index: string]: unknown }; + + /** + * Additional user-specified custom parameters. + */ + custom?: { [index: string]: unknown }; +} + +/** + * Configuration for the AI provider. 
+ */ +export interface LDProviderConfig { + /** + * The name of the provider (e.g., "cloudflare-workers-ai"). + */ + name: string; +} + +/** + * A message in a conversation or prompt. + */ +export interface LDMessage { + /** + * The role of the message sender. + */ + role: 'user' | 'assistant' | 'system'; + + /** + * The content of the message. + */ + content: string; +} + +/** + * Options for mapping to Cloudflare Workers AI format. + */ +export interface WorkersAIMapOptions { + /** + * Override the model name. + */ + modelOverride?: string; + + /** + * Enable streaming responses. + */ + stream?: boolean; + + /** + * Additional parameters to merge into the configuration. + */ + additionalParams?: Record; +} + +/** + * Configuration format for Cloudflare Workers AI API. + */ +export type WorkersAIConfig = { + /** + * The Cloudflare Workers AI model ID. + */ + model: string; + + /** + * Messages for chat completion models. + */ + messages?: Array<{ role: string; content: string }>; + + /** + * Enable streaming. + */ + stream?: boolean; + + /** + * Maximum tokens to generate. + */ + max_tokens?: number; + + /** + * Sampling temperature. + */ + temperature?: number; + + /** + * Top-p sampling. + */ + top_p?: number; + + /** + * Top-k sampling. + */ + top_k?: number; + + /** + * Frequency penalty. + */ + frequency_penalty?: number; + + /** + * Presence penalty. + */ + presence_penalty?: number; + + /** + * Additional parameters. + */ + [key: string]: unknown; +}; + +/** + * AI configuration from LaunchDarkly with tracker and conversion methods. + */ +export interface LDAIConfig { + /** + * Model configuration. + */ + model?: LDModelConfig; + + /** + * Messages for the model. + */ + messages?: LDMessage[]; + + /** + * Provider configuration. + */ + provider?: LDProviderConfig; + + /** + * Tracker for metrics and analytics. + */ + tracker: LDAIConfigTracker; + + /** + * Whether the configuration is enabled. 
+ */ + enabled: boolean; + + /** + * Converts this configuration to Cloudflare Workers AI format. + * + * @param options Optional mapping options. + * @returns Configuration ready to use with Cloudflare Workers AI. + */ + toWorkersAI(binding: Ai, options?: WorkersAIMapOptions): WorkersAIConfig; + + /** + * Convenience helper that maps to Cloudflare Workers AI config, runs the model via + * the provided AI binding, and automatically records metrics via the tracker. + * + * @param aiBinding The Cloudflare Workers AI binding (env.AI) + * @param options Optional mapping options for Cloudflare Workers AI + * @returns Provider-specific response from Workers AI + */ + // Cloudflare convenience runner is provided separately; no inline runner here. +} + +/** + * Default AI configuration (without tracker and conversion methods). + */ +export type LDAIDefaults = Omit<LDAIConfig, 'tracker' | 'toWorkersAI' | 'enabled'> & { + /** + * Whether the configuration is enabled. Defaults to false. + */ + enabled?: boolean; +}; diff --git a/packages/sdk/cloudflare-ai/src/api/config/LDAIConfigTracker.ts b/packages/sdk/cloudflare-ai/src/api/config/LDAIConfigTracker.ts new file mode 100644 index 0000000000..230e9fcee4 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/config/LDAIConfigTracker.ts @@ -0,0 +1,92 @@ +import type { LDFeedbackKind, LDTokenUsage } from '../metrics'; + +export interface LDAIMetricSummary { + durationMs?: number; + tokens?: LDTokenUsage; + success?: boolean; + feedback?: { kind: LDFeedbackKind }; + timeToFirstTokenMs?: number; +} + +/** + * Tracker for AI configuration metrics and analytics. + */ +export interface LDAIConfigTracker { + /** + * Track a successful AI operation. + */ + trackSuccess(): void; + + /** + * Track an unsuccessful AI operation. + */ + trackError(): void; + + /** + * Track the duration of an AI operation in milliseconds. + * + * @param durationMs The duration in milliseconds. + */ + trackDuration(durationMs: number): void; + + /** + * Track comprehensive metrics for an AI operation. 
+ * + * @param metrics Object containing duration, token usage, and success status. + */ + trackMetrics(metrics: { durationMs: number; usage?: LDTokenUsage; success: boolean }): void; + + /** + * Track only token usage for an AI operation. + * + * @param usage Token usage information. + */ + trackTokens(usage: LDTokenUsage): void; + + /** + * Track user feedback for an AI operation. + * + * @param kind The type of feedback (positive or negative). + */ + trackFeedback(kind: LDFeedbackKind): void; + + /** + * Track time to first token (TTFT) in milliseconds. + * + * @param timeToFirstTokenMs Milliseconds until first token. + */ + trackTimeToFirstToken(timeToFirstTokenMs: number): void; + + trackDurationOf<TRes>(func: () => Promise<TRes>): Promise<TRes>; + + trackWorkersAIMetrics< + T extends { + usage?: { + prompt_tokens?: number; + completion_tokens?: number; + total_tokens?: number; + input_tokens?: number; + output_tokens?: number; + }; + }, + >( + func: () => Promise<T>, + ): Promise<T>; + + trackWorkersAIStreamMetrics< + T extends { + usage?: Promise<{ + prompt_tokens?: number; + completion_tokens?: number; + total_tokens?: number; + input_tokens?: number; + output_tokens?: number; + }>; + finishReason?: Promise<string>; + }, + >( + func: () => T, + ): T; + + getSummary(): LDAIMetricSummary; +} diff --git a/packages/sdk/cloudflare-ai/src/api/config/index.ts b/packages/sdk/cloudflare-ai/src/api/config/index.ts new file mode 100644 index 0000000000..2bb488be5b --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/config/index.ts @@ -0,0 +1,2 @@ +export * from './LDAIConfig'; +export * from './LDAIConfigTracker'; diff --git a/packages/sdk/cloudflare-ai/src/api/index.ts b/packages/sdk/cloudflare-ai/src/api/index.ts new file mode 100644 index 0000000000..cd6333b027 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/index.ts @@ -0,0 +1,4 @@ +export * from './config'; +export * from './agents'; +export * from './metrics'; +export * from './LDAIClient'; diff --git 
a/packages/sdk/cloudflare-ai/src/api/metrics/LDFeedbackKind.ts b/packages/sdk/cloudflare-ai/src/api/metrics/LDFeedbackKind.ts new file mode 100644 index 0000000000..1c78caf7be --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/metrics/LDFeedbackKind.ts @@ -0,0 +1,4 @@ +/** + * Feedback kinds for AI operations. + */ +export type LDFeedbackKind = 'positive' | 'negative'; diff --git a/packages/sdk/cloudflare-ai/src/api/metrics/LDTokenUsage.ts b/packages/sdk/cloudflare-ai/src/api/metrics/LDTokenUsage.ts new file mode 100644 index 0000000000..c9d6a8ad88 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/metrics/LDTokenUsage.ts @@ -0,0 +1,8 @@ +/** + * Token usage tracking for AI operations. + */ +export interface LDTokenUsage { + input: number; + output: number; + total: number; +} diff --git a/packages/sdk/cloudflare-ai/src/api/metrics/index.ts b/packages/sdk/cloudflare-ai/src/api/metrics/index.ts new file mode 100644 index 0000000000..708ec15dbc --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/api/metrics/index.ts @@ -0,0 +1,2 @@ +export * from './LDTokenUsage'; +export * from './LDFeedbackKind'; diff --git a/packages/sdk/cloudflare-ai/src/index.ts b/packages/sdk/cloudflare-ai/src/index.ts new file mode 100644 index 0000000000..0a764d70d1 --- /dev/null +++ b/packages/sdk/cloudflare-ai/src/index.ts @@ -0,0 +1,38 @@ +/** + * This is the API reference for the LaunchDarkly AI SDK for Cloudflare Workers. + * + * In typical usage, you will call {@link initAi} once at startup time to obtain an instance of + * {@link LDAIClient}, which provides access to all of the SDK's AI configuration functionality. + * + * For more information, see the SDK reference guide. 
+ * + * @packageDocumentation + */ +import type { KVNamespace } from '@cloudflare/workers-types'; + +import type { LDAIClient } from './api/LDAIClient'; +import { setClientKVMeta } from './ClientKVMeta'; +import { LDAIClientImpl } from './LDAIClientImpl'; +import type { LDClientMin } from './LDClientMin'; + +// KV metadata helpers moved to ClientKVMeta to avoid cycles + +/** + * Initialize a new AI client. This client will be used to perform AI configuration operations. + * @param ldClient The base LaunchDarkly Cloudflare client. + * @param options.clientSideID Optional client-side ID for direct AI Config reading from KV. + * @param options.kvNamespace Optional KV namespace for direct AI Config access. + * @returns A new AI client. + */ +export function initAi( + ldClient: LDClientMin, + options?: { clientSideID?: string; kvNamespace?: KVNamespace }, +): LDAIClient { + if (options?.clientSideID && options?.kvNamespace) { + setClientKVMeta(ldClient, options.clientSideID, options.kvNamespace); + } + return new LDAIClientImpl(ldClient); +} + +export * from './api'; +// No public mapper export; mapping is handled inline. 
diff --git a/packages/sdk/cloudflare-ai/tsconfig.eslint.json b/packages/sdk/cloudflare-ai/tsconfig.eslint.json new file mode 100644 index 0000000000..dfe985d505 --- /dev/null +++ b/packages/sdk/cloudflare-ai/tsconfig.eslint.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "include": ["src/**/*", "__tests__/**/*", "*.ts", "example/**/*"] +} diff --git a/packages/sdk/cloudflare-ai/tsconfig.json b/packages/sdk/cloudflare-ai/tsconfig.json new file mode 100644 index 0000000000..2f5d51d709 --- /dev/null +++ b/packages/sdk/cloudflare-ai/tsconfig.json @@ -0,0 +1,21 @@ +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": ".", + "declaration": true, + "declarationMap": true, + "module": "ES6", + "target": "ES2017", + "lib": ["es2020"], + "types": ["@cloudflare/workers-types", "jest", "node"], + "sourceMap": true, + "skipLibCheck": true, + "stripInternal": true, + "moduleResolution": "node", + "allowSyntheticDefaultImports": true, + "esModuleInterop": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/sdk/cloudflare-ai/tsconfig.ref.json b/packages/sdk/cloudflare-ai/tsconfig.ref.json new file mode 100644 index 0000000000..1012da0630 --- /dev/null +++ b/packages/sdk/cloudflare-ai/tsconfig.ref.json @@ -0,0 +1,6 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "composite": true + } +} diff --git a/packages/tooling/jest/README.md b/packages/tooling/jest/README.md index 1c00c17402..6e35f4eba7 100644 --- a/packages/tooling/jest/README.md +++ b/packages/tooling/jest/README.md @@ -28,9 +28,6 @@ Then in `jest.config.js` add `@launchdarkly/jest/{framework}` to setupFiles: ```js // jest.config.js module.exports = { - // for react - setupFiles: ['@launchdarkly/jest/react'], - // for react-native setupFiles: ['@launchdarkly/jest/react-native'], }; @@ -38,7 +35,32 @@ module.exports = { ## Quickstart -TODO: +// Welcome.test.tsx +import React from 'react'; +import { render } 
from '@testing-library/react-native'; +import { + mockFlags, + resetLDMocks, + getLDClient, +} from '@launchdarkly/jest'; +import Welcome from './Welcome'; + +afterEach(() => { + resetLDMocks(); +}); + +test('evaluates a boolean flag', () => { + mockFlags({ 'my-boolean-flag': true }); + const { getByText } = render(<Welcome />); + expect(getByText('Flag value is true')).toBeTruthy(); +}); + +test('captures a track call', () => { + const client = getLDClient(); // mocked client from LD jest tooling + client.track('event-name', { foo: 'bar' }); + expect(client.track).toHaveBeenCalledWith('event-name', { foo: 'bar' }); + expect(client.track).toHaveBeenCalledTimes(1); +}); ## Developing this package