diff --git a/.gitignore b/.gitignore index 8593096..c683bde 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ doc/ /pkg/ /spec/reports/ /tmp/ - +.DS_Store # rspec failure tracking .rspec_status diff --git a/.rspec b/.rspec new file mode 100644 index 0000000..34c5164 --- /dev/null +++ b/.rspec @@ -0,0 +1,3 @@ +--format documentation +--color +--require spec_helper diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index caa5422..4283e4e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -43,12 +43,12 @@ The output will appear in `docs/build/html`. ## Code organization -A special case is the namespace `LaunchDarkly::AI::Impl`, and any namespaces within it. Everything under `Impl` is considered a private implementation detail: all files there are excluded from the generated documentation, and are considered subject to change at any time and not supported for direct use by application developers. We do this because Ruby's scope/visibility system is somewhat limited compared to other languages: a method can be `private` or `protected` within a class, but there is no way to make it visible to other classes in the library yet invisible to code outside of the library, and there is similarly no way to hide a class. +A special case is the namespace `LaunchDarkly::Server::AI::Impl`, and any namespaces within it. Everything under `Impl` is considered a private implementation detail: all files there are excluded from the generated documentation, and are considered subject to change at any time and not supported for direct use by application developers. We do this because Ruby's scope/visibility system is somewhat limited compared to other languages: a method can be `private` or `protected` within a class, but there is no way to make it visible to other classes in the library yet invisible to code outside of the library, and there is similarly no way to hide a class. So, if there is a class whose existence is entirely an implementation detail, it should be in `Impl`. 
Similarly, classes that are _not_ in `Impl` must not expose any public members that are not meant to be part of the supported public API. This is important because of our guarantee of backward compatibility for all public APIs within a major version: we want to be able to change our implementation details to suit the needs of the code, without worrying about breaking a customer's code. Due to how the language works, we can't actually prevent an application developer from referencing those classes in their code, but this convention makes it clear that such use is discouraged and unsupported. ## Documenting types and methods -All classes and public methods outside of `LaunchDarkly::AI::Impl` should have documentation comments. These are used to build the API documentation that is published at https://launchdarkly.github.io/ruby-server-sdk-ai/ and https://www.rubydoc.info/gems/launchdarkly-server-sdk-ai. The documentation generator is YARD; see https://yardoc.org/ for the comment format it uses. +All classes and public methods outside of `LaunchDarkly::Server::AI::Impl` should have documentation comments. These are used to build the API documentation that is published at https://launchdarkly.github.io/ruby-server-sdk-ai/ and https://www.rubydoc.info/gems/launchdarkly-server-sdk-ai. The documentation generator is YARD; see https://yardoc.org/ for the comment format it uses. Please try to make the style and terminology in documentation comments consistent with other documentation comments in the library. Also, if a class or method is being added that has an equivalent in other libraries, and if we have described it in a consistent away in those other libraries, please reuse the text whenever possible (with adjustments for anything language-specific) rather than writing new text. 
diff --git a/Gemfile b/Gemfile new file mode 100644 index 0000000..6d92065 --- /dev/null +++ b/Gemfile @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +source 'https://rubygems.org' + +# Specify your gem's dependencies in launchdarkly-server-sdk-ai.gemspec +gemspec diff --git a/bin/console b/bin/console new file mode 100755 index 0000000..295cd1b --- /dev/null +++ b/bin/console @@ -0,0 +1,11 @@ +#!/usr/bin/env ruby +# frozen_string_literal: true + +require 'bundler/setup' +require 'launchdarkly-server-sdk-ai' + +# You can add fixtures and/or initialization code here to make experimenting +# with your gem easier. You can also use a different console, if you like. + +require 'irb' +IRB.start(__FILE__) diff --git a/bin/setup b/bin/setup new file mode 100755 index 0000000..dce67d8 --- /dev/null +++ b/bin/setup @@ -0,0 +1,8 @@ +#!/usr/bin/env bash +set -euo pipefail +IFS=$'\n\t' +set -vx + +bundle install + +# Do any other automated setup that you need to do here diff --git a/examples/hello-bedrock/Gemfile b/examples/hello-bedrock/Gemfile new file mode 100644 index 0000000..83b1714 --- /dev/null +++ b/examples/hello-bedrock/Gemfile @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +source 'https://rubygems.org' + +gem 'aws-sdk-bedrockruntime' +gem 'launchdarkly-server-sdk-ai', path: '../..' 
diff --git a/launchdarkly-server-sdk-ai.gemspec b/launchdarkly-server-sdk-ai.gemspec index b0eb6d0..fba31a0 100644 --- a/launchdarkly-server-sdk-ai.gemspec +++ b/launchdarkly-server-sdk-ai.gemspec @@ -1,23 +1,34 @@ # frozen_string_literal: true -require_relative "lib/ldclient-ai/version" +require_relative 'lib/server/ai/version' Gem::Specification.new do |spec| - spec.name = "launchdarkly-server-sdk-ai" - spec.version = LaunchDarkly::AI::VERSION - spec.authors = ["LaunchDarkly"] - spec.email = ["team@launchdarkly.com"] - spec.summary = "LaunchDarkly AI SDK for Ruby" - spec.description = "LaunchDarkly SDK AI Configs integration for the Ruby server side SDK" - spec.license = "Apache-2.0" - spec.homepage = "https://github.com/launchdarkly/ruby-server-sdk-ai" - spec.metadata["source_code_uri"] = "https://github.com/launchdarkly/ruby-server-sdk-ai" - spec.metadata["changelog_uri"] = "https://github.com/launchdarkly/ruby-server-sdk-ai/blob/main/CHANGELOG.md" + spec.name = 'launchdarkly-server-sdk-ai' + spec.version = LaunchDarkly::Server::AI::VERSION + spec.authors = ['LaunchDarkly'] + spec.email = ['team@launchdarkly.com'] + spec.summary = 'LaunchDarkly AI SDK for Ruby' + spec.description = 'LaunchDarkly SDK AI Configs integration for the Ruby server side SDK' + spec.license = 'Apache-2.0' + spec.homepage = 'https://github.com/launchdarkly/ruby-server-sdk-ai' + spec.metadata['source_code_uri'] = 'https://github.com/launchdarkly/ruby-server-sdk-ai' + spec.metadata['changelog_uri'] = 'https://github.com/launchdarkly/ruby-server-sdk-ai/blob/main/CHANGELOG.md' - spec.files = Dir["{lib}/**/*.rb", "bin/*", "LICENSE", "*.md"] + spec.files = Dir['{lib}/**/*.rb', 'bin/*', 'LICENSE', '*.md'] spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } - spec.require_paths = ["lib"] - spec.required_ruby_version = ">= 3.0.0" + spec.require_paths = ['lib'] + spec.required_ruby_version = '>= 3.0.0' - spec.add_runtime_dependency "launchdarkly-server-sdk", "~> 8.4.0" -end \ 
No newline at end of file + spec.add_dependency 'launchdarkly-server-sdk', '~> 8.5' + spec.add_dependency 'logger' + spec.add_dependency 'mustache', '~> 1.1' + + spec.add_development_dependency 'bundler', '~> 2.0' + spec.add_development_dependency 'debug', '~> 1.0' + spec.add_development_dependency 'rake', '~> 13.0' + spec.add_development_dependency 'rspec', '~> 3.0' + spec.add_development_dependency 'rubocop', '~> 1.21' + spec.add_development_dependency 'rubocop-performance', '~> 1.15' + spec.add_development_dependency 'rubocop-rake', '~> 0.6' + spec.add_development_dependency 'rubocop-rspec', '~> 3.6' +end diff --git a/lib/launchdarkly-server-sdk-ai.rb b/lib/launchdarkly-server-sdk-ai.rb index 22c8157..1abaeec 100644 --- a/lib/launchdarkly-server-sdk-ai.rb +++ b/lib/launchdarkly-server-sdk-ai.rb @@ -1,3 +1,30 @@ # frozen_string_literal: true -raise "Reserved for LaunchDarkly" \ No newline at end of file +require 'logger' +require 'mustache' + +require 'server/ai/version' +require 'server/ai/client' +require 'server/ai/ai_config_tracker' + +module LaunchDarkly + module Server + # + # Namespace for the LaunchDarkly AI SDK. + # + module AI + # + # @return [Logger] the Rails logger if in Rails, or a default Logger at WARN level otherwise + # + def self.default_logger + if defined?(Rails) && Rails.respond_to?(:logger) && Rails.logger + Rails.logger + else + log = ::Logger.new($stdout) + log.level = ::Logger::WARN + log + end + end + end + end +end diff --git a/lib/server/ai/ai_config_tracker.rb b/lib/server/ai/ai_config_tracker.rb new file mode 100644 index 0000000..2cb9337 --- /dev/null +++ b/lib/server/ai/ai_config_tracker.rb @@ -0,0 +1,245 @@ +# frozen_string_literal: true + +require 'ldclient-rb' + +module LaunchDarkly + module Server + module AI + # + # Tracks token usage for AI operations. + # + class TokenUsage + attr_reader :total, :input, :output + + # + # @param total [Integer] Total number of tokens used. 
+ # @param input [Integer] Number of tokens in the prompt. + # @param output [Integer] Number of tokens in the completion. + # + def initialize(total: nil, input: nil, output: nil) + @total = total + @input = input + @output = output + end + end + + # + # Summary of metrics which have been tracked. + # + class MetricSummary + attr_accessor :duration, :success, :feedback, :usage, :time_to_first_token + + def initialize + @duration = nil + @success = nil + @feedback = nil + @usage = nil + @time_to_first_token = nil + end + end + + # + # The AIConfigTracker class is used to track AI configuration usage. + # + class AIConfigTracker + attr_reader :ld_client, :config_key, :context, :variation_key, :version, :summary + + def initialize(ld_client:, variation_key:, config_key:, version:, context:) + @ld_client = ld_client + @variation_key = variation_key + @config_key = config_key + @version = version + @context = context + @summary = MetricSummary.new + end + + # + # Track the duration of an AI operation + # + # @param duration [Integer] The duration in milliseconds + # + def track_duration(duration) + @summary.duration = duration + @ld_client.track( + '$ld:ai:duration:total', + @context, + flag_data, + duration + ) + end + + # + # Track the duration of a block of code + # + # @yield The block to measure + # @return The result of the block + # + def track_duration_of(&block) + start_time = Time.now + yield + ensure + duration = ((Time.now - start_time) * 1000).to_i + track_duration(duration) + end + + # + # Track time to first token + # + # @param duration [Integer] The duration in milliseconds + # + def track_time_to_first_token(time_to_first_token) + @summary.time_to_first_token = time_to_first_token + @ld_client.track( + '$ld:ai:tokens:ttf', + @context, + flag_data, + time_to_first_token + ) + end + + # + # Track user feedback + # + # @param kind [Symbol] The kind of feedback (:positive or :negative) + # + def track_feedback(kind:) + @summary.feedback = kind + 
event_name = kind == :positive ? '$ld:ai:feedback:user:positive' : '$ld:ai:feedback:user:negative' + @ld_client.track( + event_name, + @context, + flag_data, + 1 + ) + end + + # + # Track a successful AI generation + # + def track_success + @summary.success = true + @ld_client.track( + '$ld:ai:generation', + @context, + flag_data, + 1 + ) + @ld_client.track( + '$ld:ai:generation:success', + @context, + flag_data, + 1 + ) + end + + # + # Track an error in AI generation + # + def track_error + @summary.success = false + @ld_client.track( + '$ld:ai:generation', + @context, + flag_data, + 1 + ) + @ld_client.track( + '$ld:ai:generation:error', + @context, + flag_data, + 1 + ) + end + + # + # Track token usage + # + # @param token_usage [TokenUsage] An object containing token usage details + # + def track_tokens(token_usage) + @summary.usage = token_usage + if token_usage.total&.positive? + @ld_client.track( + '$ld:ai:tokens:total', + @context, + flag_data, + token_usage.total + ) + end + if token_usage.input&.positive? + @ld_client.track( + '$ld:ai:tokens:input', + @context, + flag_data, + token_usage.input + ) + end + return unless token_usage.output&.positive? + + @ld_client.track( + '$ld:ai:tokens:output', + @context, + flag_data, + token_usage.output + ) + end + + # + # Track OpenAI-specific operations. + # This method tracks the duration, token usage, and success/error status. + # If the provided block raises, this method will also raise. + # A failed operation will not have any token usage data. + # + # @yield The block to track. + # @return The result of the tracked block. + # + def track_openai_metrics(&block) + result = track_duration_of(&block) + track_success + track_tokens(openai_to_token_usage(result[:usage])) if result[:usage] + result + rescue StandardError + track_error + raise + end + + # + # Track AWS Bedrock conversation operations. + # This method tracks the duration, token usage, and success/error status. + # + # @yield The block to track. 
+ # @return [Hash] The original response hash. + # + def track_bedrock_converse_metrics(&block) + result = track_duration_of(&block) + track_success + track_tokens(bedrock_to_token_usage(result[:usage])) if result[:usage] + result + rescue StandardError + track_error + raise + end + + private def flag_data + { variationKey: @variation_key, configKey: @config_key, version: @version } + end + + private def openai_to_token_usage(usage) + TokenUsage.new( + total: usage[:total_tokens] || usage['total_tokens'], + input: usage[:prompt_tokens] || usage['prompt_tokens'], + output: usage[:completion_tokens] || usage['completion_tokens'] + ) + end + + private def bedrock_to_token_usage(usage) + TokenUsage.new( + total: usage[:total_tokens] || usage['total_tokens'], + input: usage[:input_tokens] || usage['input_tokens'], + output: usage[:output_tokens] || usage['output_tokens'] + ) + end + end + end + end +end diff --git a/lib/server/ai/client.rb b/lib/server/ai/client.rb new file mode 100644 index 0000000..073f092 --- /dev/null +++ b/lib/server/ai/client.rb @@ -0,0 +1,205 @@ +# frozen_string_literal: true + +require 'ldclient-rb' +require 'mustache' +require_relative 'ai_config_tracker' + +module LaunchDarkly + # + # Namespace for the LaunchDarkly Server SDK + # + module Server + # + # Namespace for the LaunchDarkly Server AI SDK. + # + module AI + # + # Holds AI role and content. + # + class Message + attr_reader :role, :content + + def initialize(role, content) + @role = role + @content = content + end + + def to_h + { + role: @role, + content: @content, + } + end + end + + # + # The ModelConfig class represents an AI model configuration. + # + class ModelConfig + attr_reader :name + + def initialize(name:, parameters: {}, custom: {}) + @name = name + @parameters = parameters + @custom = custom + end + + # + # Retrieve model-specific parameters. + # + # Accessing a named, typed attribute (e.g. name) will result in the call + # being delegated to the appropriate property. 
+ # + # @param key [String] The parameter key to retrieve + # @return [Object, nil] The parameter value or nil if not found + # + def parameter(key) + return @name if key == 'name' + return nil unless @parameters.is_a?(Hash) + + @parameters[key] + end + + # + # Retrieve customer provided data. + # + # @param key [String] The custom key to retrieve + # @return [Object, nil] The custom value or nil if not found + # + def custom(key) + return nil unless @custom.is_a?(Hash) + + @custom[key] + end + + def to_h + { + name: @name, + parameters: @parameters, + custom: @custom, + } + end + end + + # + # Configuration related to the provider. + # + class ProviderConfig + attr_reader :name + + def initialize(name) + @name = name + end + + def to_h + { + name: @name, + } + end + end + + # + # The AIConfig class represents an AI configuration. + # + class AIConfig + attr_reader :enabled, :messages, :variables, :tracker, :model, :provider + + def initialize(enabled: nil, model: nil, messages: nil, tracker: nil, provider: nil) + @enabled = enabled + @messages = messages + @tracker = tracker + @model = model + @provider = provider + end + + def to_h + { + _ldMeta: { + enabled: @enabled || false, + }, + messages: @messages.is_a?(Array) ? @messages.map { |msg| msg&.to_h } : nil, + model: @model&.to_h, + provider: @provider&.to_h, + } + end + end + + # + # The Client class is the main entry point for the LaunchDarkly AI SDK. 
+ # + class Client + attr_reader :logger, :ld_client + + def initialize(ld_client) + raise ArgumentError, 'LDClient instance is required' unless ld_client.is_a?(LaunchDarkly::LDClient) + + @ld_client = ld_client + @logger = LaunchDarkly::Server::AI.default_logger + end + + # + # Retrieves the AIConfig + # + # @param config_key [String] The key of the configuration flag + # @param context [LDContext] The context used when evaluating the flag + # @param default_value [AIConfig] The default value to use if the flag is not found + # @param variables [Hash] Optional variables for rendering messages + # @return [AIConfig] An AIConfig instance containing the configuration data + # + def config(config_key, context, default_value = nil, variables = nil) + variation = @ld_client.variation( + config_key, + context, + default_value.respond_to?(:to_h) ? default_value.to_h : nil + ) + + all_variables = variables ? variables.dup : {} + all_variables[:ldctx] = context.to_h + + # Process messages and provider configuration + messages = nil + if variation[:messages].is_a?(Array) && variation[:messages].all? 
{ |msg| msg.is_a?(Hash) } + messages = variation[:messages].map do |message| + next unless message[:content].is_a?(String) + + Message.new( + message[:role], + Mustache.render(message[:content], all_variables) + ) + end + end + + if (provider_config = variation[:provider]) && provider_config.is_a?(Hash) + provider_config = ProviderConfig.new(provider_config.fetch(:name, '')) + end + + if (model = variation[:model]) && model.is_a?(Hash) + parameters = variation[:model][:parameters] + custom = variation[:model][:custom] + model = ModelConfig.new( + name: variation[:model][:name], + parameters: parameters, + custom: custom + ) + end + + tracker = LaunchDarkly::Server::AI::AIConfigTracker.new( + ld_client: @ld_client, + variation_key: variation.dig(:_ldMeta, :variationKey) || '', + config_key: config_key, + version: variation.dig(:_ldMeta, :version) || 1, + context: context + ) + + AIConfig.new( + enabled: variation.dig(:_ldMeta, :enabled) || false, + messages: messages, + tracker: tracker, + model: model, + provider: provider_config + ) + end + end + end + end +end diff --git a/lib/server/ai/version.rb b/lib/server/ai/version.rb new file mode 100644 index 0000000..0fcda51 --- /dev/null +++ b/lib/server/ai/version.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +module LaunchDarkly + module Server + module AI + VERSION = '0.0.0' # x-release-please-version + end + end +end diff --git a/spec/server/ai/ai_spec.rb b/spec/server/ai/ai_spec.rb new file mode 100644 index 0000000..e0733a6 --- /dev/null +++ b/spec/server/ai/ai_spec.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +require 'launchdarkly-server-sdk-ai' + +RSpec.describe LaunchDarkly::Server::AI do + it 'has a version number' do + expect(LaunchDarkly::Server::AI::VERSION).not_to be_nil + end + + it 'returns a logger' do + logger = described_class.default_logger + expect(logger).to be_a(Logger) + expect(logger.level).to eq(Logger::WARN) + end +end diff --git a/spec/server/ai/client_spec.rb 
b/spec/server/ai/client_spec.rb new file mode 100644 index 0000000..d5ae0dd --- /dev/null +++ b/spec/server/ai/client_spec.rb @@ -0,0 +1,340 @@ +# frozen_string_literal: true + +require 'launchdarkly-server-sdk' +require 'launchdarkly-server-sdk-ai' + +RSpec.describe LaunchDarkly::Server::AI do + let(:td) do + data_source = LaunchDarkly::Integrations::TestData.data_source + data_source.update(data_source.flag('model-config') + .variations( + { + model: { name: 'fakeModel', parameters: { temperature: 0.5, maxTokens: 4096 }, + custom: { 'extra-attribute': 'value' } }, + provider: { name: 'fakeProvider' }, + messages: [{ role: 'system', content: 'Hello, {{name}}!' }], + _ldMeta: { enabled: true, variationKey: 'abcd', version: 1 }, + }, + :green + ) + .variation_for_all(0)) + + data_source.update(data_source.flag('multiple-messages') + .variations( + { + model: { name: 'fakeModel', parameters: { temperature: 0.7, maxTokens: 8192 } }, + messages: [ + { role: 'system', content: 'Hello, {{name}}!' }, + { role: 'user', content: 'The day is, {{day}}!' }, + ], + _ldMeta: { enabled: true, variationKey: 'abcd', version: 1 }, + }, + :green + ) + .variation_for_all(0)) + + data_source.update(data_source.flag('ctx-interpolation') + .variations( + { + model: { name: 'fakeModel', + parameters: { 'extra-attribute': 'I can be anything I set my mind/type to' } }, + messages: [{ role: 'system', content: 'Hello, {{ldctx.name}}! Is your last name {{ldctx.last}}?' }], + _ldMeta: { enabled: true, variationKey: 'abcd', version: 1 }, + } + ) + .variation_for_all(0)) + + data_source.update(data_source.flag('multi-ctx-interpolation') + .variations( + { + model: { name: 'fakeModel', + parameters: { 'extra-attribute': 'I can be anything I set my mind/type to' } }, + messages: [{ role: 'system', + content: 'Hello, {{ldctx.user.name}}! Do you work for {{ldctx.org.shortname}}?' 
}], + _ldMeta: { enabled: true, variationKey: 'abcd', version: 1 }, + } + ) + .variation_for_all(0)) + + data_source.update(data_source.flag('off-config') + .variations( + { + model: { name: 'fakeModel', parameters: { temperature: 0.1 } }, + messages: [{ role: 'system', content: 'Hello, {{name}}!' }], + _ldMeta: { enabled: false, variationKey: 'abcd', version: 1 }, + } + ) + .variation_for_all(0)) + + data_source.update(data_source.flag('initial-config-disabled') + .variations( + { + _ldMeta: { enabled: false }, + }, + { + _ldMeta: { enabled: true }, + } + ) + .variation_for_all(0)) + + data_source.update(data_source.flag('initial-config-enabled') + .variations( + { + _ldMeta: { enabled: false }, + }, + { + _ldMeta: { enabled: true }, + } + ) + .variation_for_all(1)) + + data_source + end + + let(:ld_client) do + config = LaunchDarkly::Config.new(data_source: td, send_events: false) + LaunchDarkly::LDClient.new('sdk-key', config) + end + let(:ai_client) { LaunchDarkly::Server::AI::Client.new(ld_client) } + + describe LaunchDarkly::Server::AI::ModelConfig do + it 'delegates to properties' do + model = described_class.new(name: 'fakeModel', parameters: { 'extra-attribute': 'value' }) + expect(model.name).to eq('fakeModel') + expect(model.parameter(:'extra-attribute')).to eq('value') + expect(model.parameter('non-existent')).to be_nil + expect(model.parameter('name')).to eq('fakeModel') + end + + it 'handles custom attributes' do + model = described_class.new(name: 'fakeModel', custom: { 'extra-attribute': 'value' }) + expect(model.name).to eq('fakeModel') + expect(model.custom(:'extra-attribute')).to eq('value') + expect(model.custom('non-existent')).to be_nil + expect(model.custom('name')).to be_nil + end + end + + describe LaunchDarkly::Server::AI::Client do + describe '#initialize' do + it 'initializes with a valid LDClient instance' do + expect(ai_client).to be_a(described_class) + expect(ai_client.ld_client).to eq(ld_client) + end + + it 'raises an error if 
LDClient is not provided' do + expect { described_class.new(nil) }.to raise_error(ArgumentError, 'LDClient instance is required') + end + + it 'raises an error if LDClient is not an instance of LaunchDarkly::LDClient' do + expect { described_class.new('not a client') }.to raise_error(ArgumentError, 'LDClient instance is required') + end + end + + describe '#config' do + it 'uses default config on invalid flag' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) + model = LaunchDarkly::Server::AI::ModelConfig.new(name: 'fakeModel', + parameters: { + temperature: 0.5, maxTokens: 4096 + }) + messages = [LaunchDarkly::Server::AI::Message.new('system', 'Hello, {{name}}!')] + default_config = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: model, + messages: messages + ) + variables = { 'name' => 'World' } + + config = ai_client.config('missing-flag', context, default_config, variables) + expect(config.messages).not_to be_nil + expect(config.messages.length).to be > 0 + expect(config.messages[0].content).to eq('Hello, World!') + expect(config.enabled).to be true + + expect(config.model).not_to be_nil + expect(config.model.name).to eq('fakeModel') + expect(config.model.parameter(:temperature)).to eq(0.5) + expect(config.model.parameter(:maxTokens)).to eq(4096) + end + + it 'interpolates variables in model config messages' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fakeModel'), + messages: [LaunchDarkly::Server::AI::Message.new('system', 'Hello, {{name}}!')] + ) + variables = { 'name' => 'World' } + + config = ai_client.config('model-config', context, default_value, variables) + expect(config.messages).not_to be_nil + expect(config.messages.length).to be > 0 + expect(config.messages[0].content).to eq('Hello, World!') + expect(config.enabled).to be true + + 
expect(config.model).not_to be_nil + expect(config.model.name).to eq('fakeModel') + expect(config.model.parameter(:temperature)).to eq(0.5) + expect(config.model.parameter(:maxTokens)).to eq(4096) + end + + it 'returns config with messages interpolated as empty when no variables are provided' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fakeModel'), + messages: [] + ) + + config = ai_client.config('model-config', context, default_value, {}) + + expect(config.messages).not_to be_nil + expect(config.messages.length).to be > 0 + expect(config.messages[0].content).to eq('Hello, !') + expect(config.enabled).to be true + + expect(config.model).not_to be_nil + expect(config.model.name).to eq('fakeModel') + expect(config.model.parameter(:temperature)).to eq(0.5) + expect(config.model.parameter(:maxTokens)).to eq(4096) + end + + it 'handles provider config correctly' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user', name: 'Sandy' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fake-model'), + messages: [] + ) + variables = { 'name' => 'World' } + + config = ai_client.config('model-config', context, default_value, variables) + + expect(config.provider).not_to be_nil + expect(config.provider.name).to eq('fakeProvider') + end + + it 'interpolates context variables in messages using ldctx' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user', name: 'Sandy', last: 'Beaches' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fake-model'), + messages: [] + ) + variables = { 'name' => 'World' } + + config = ai_client.config('ctx-interpolation', context, default_value, variables) + + 
expect(config.messages).not_to be_nil + expect(config.messages.length).to be > 0 + expect(config.messages[0].content).to eq('Hello, Sandy! Is your last name Beaches?') + expect(config.enabled).to be true + + expect(config.model).not_to be_nil + expect(config.model.name).to eq('fakeModel') + expect(config.model.parameter(:temperature)).to be_nil + expect(config.model.parameter(:maxTokens)).to be_nil + expect(config.model.parameter(:'extra-attribute')).to eq('I can be anything I set my mind/type to') + end + + it 'interpolates variables from multiple contexts in messages using ldctx' do + user_context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user', name: 'Sandy' }) + org_context = LaunchDarkly::LDContext.create({ key: 'org-key', kind: 'org', name: 'LaunchDarkly', + shortname: 'LD' }) + context = LaunchDarkly::LDContext.create_multi([user_context, org_context]) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fake-model'), + messages: [] + ) + variables = { 'name' => 'World' } + + config = ai_client.config('multi-ctx-interpolation', context, default_value, variables) + + expect(config.messages).not_to be_nil + expect(config.messages.length).to be > 0 + expect(config.messages[0].content).to eq('Hello, Sandy! 
Do you work for LD?') + expect(config.enabled).to be true + + expect(config.model).not_to be_nil + expect(config.model.name).to eq('fakeModel') + expect(config.model.parameter(:temperature)).to be_nil + expect(config.model.parameter(:maxTokens)).to be_nil + expect(config.model.parameter(:'extra-attribute')).to eq('I can be anything I set my mind/type to') + end + + it 'handles multiple messages and variable interpolation' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fake-model'), + messages: [] + ) + variables = { 'name' => 'World', 'day' => 'Monday' } + + config = ai_client.config('multiple-messages', context, default_value, variables) + + expect(config.messages).not_to be_nil + expect(config.messages.length).to be > 0 + expect(config.messages[0].content).to eq('Hello, World!') + expect(config.messages[1].content).to eq('The day is, Monday!') + expect(config.enabled).to be true + + expect(config.model).not_to be_nil + expect(config.model.name).to eq('fakeModel') + expect(config.model.parameter(:temperature)).to eq(0.7) + expect(config.model.parameter(:maxTokens)).to eq(8192) + end + + it 'returns disabled config when flag is off' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fake-model'), + messages: [] + ) + + config = ai_client.config('off-config', context, default_value, {}) + + expect(config.model).not_to be_nil + expect(config.enabled).to be false + expect(config.model.name).to eq('fakeModel') + expect(config.model.parameter(:temperature)).to eq(0.1) + expect(config.model.parameter(:maxTokens)).to be_nil + end + + it 'returns disabled config with nil model/messages/provider when initial config is disabled' do + context = 
LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: true, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fake-model'), + messages: [] + ) + + config = ai_client.config('initial-config-disabled', context, default_value, {}) + + expect(config.enabled).to be false + expect(config.model).to be_nil + expect(config.messages).to be_nil + expect(config.provider).to be_nil + end + + it 'returns enabled config with nil model/messages/provider when initial config is enabled' do + context = LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) + default_value = LaunchDarkly::Server::AI::AIConfig.new( + enabled: false, + model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'fake-model'), + messages: [] + ) + + config = ai_client.config('initial-config-enabled', context, default_value, {}) + + expect(config.enabled).to be true + expect(config.model).to be_nil + expect(config.messages).to be_nil + expect(config.provider).to be_nil + end + end + end +end diff --git a/spec/server/ai/config_tracker_spec.rb b/spec/server/ai/config_tracker_spec.rb new file mode 100644 index 0000000..8fed843 --- /dev/null +++ b/spec/server/ai/config_tracker_spec.rb @@ -0,0 +1,411 @@ +# frozen_string_literal: true + +require 'launchdarkly-server-sdk' +require 'launchdarkly-server-sdk-ai' + +RSpec.describe LaunchDarkly::Server::AI::AIConfigTracker do + let(:td) do + LaunchDarkly::Integrations::TestData.data_source.update( + LaunchDarkly::Integrations::TestData.data_source.flag('model_config') + .variations( + { + model: { name: 'fakeModel', parameters: { temperature: 0.5, maxTokens: 4096 }, + custom: { 'extra-attribute': 'value' } }, + provider: { name: 'fakeProvider' }, + messages: [{ role: 'system', content: 'Hello, {{name}}!' 
}], + _ldMeta: { enabled: true, variationKey: 'abcd', version: 1 }, + }, + 'green' + ) + .variation_for_all(0) + ) + end + + let(:ld_client) do + config = LaunchDarkly::Config.new(data_source: td, send_events: false) + LaunchDarkly::LDClient.new('sdk-key', config) + end + + let(:context) { LaunchDarkly::LDContext.create({ key: 'user-key', kind: 'user' }) } + let(:tracker_flag_data) { { variationKey: 'test-variation', configKey: 'test-config', version: 1 } } + let(:tracker) do + described_class.new( + ld_client: ld_client, + config_key: tracker_flag_data[:configKey], + context: context, + variation_key: tracker_flag_data[:variationKey], + version: tracker_flag_data[:version] + ) + end + + describe '#track_duration' do + it 'tracks duration with correct event name and data' do + expect(ld_client).to receive(:track).with( + '$ld:ai:duration:total', + context, + tracker_flag_data, + 100 + ) + tracker.track_duration(100) + expect(tracker.summary.duration).to eq(100) + end + end + + describe '#track_duration_of' do + it 'tracks duration of a block and returns its result' do + expect(ld_client).to receive(:track).with( + '$ld:ai:duration:total', + context, + tracker_flag_data, + kind_of(Integer) + ) + result = tracker.track_duration_of { sleep(0.01) } + expect(result).to be_within(10).of(0) # Allow some tolerance for sleep timing + expect(tracker.summary.duration).to be_within(1000).of(10) # Allow some tolerance for sleep timing + end + + it 'tracks duration even when an exception is raised' do + expect(ld_client).to receive(:track).with( + '$ld:ai:duration:total', + context, + tracker_flag_data, + kind_of(Integer) + ) + + expect do + tracker.track_duration_of do + sleep(0.01) + raise 'Something went wrong' + end + end.to raise_error('Something went wrong') + expect(tracker.summary.duration).to be_within(1000).of(10) # Allow some tolerance for sleep timing + end + end + + describe '#track_time_to_first_token' do + it 'tracks time to first token' do + expect(ld_client).to 
receive(:track).with( + '$ld:ai:tokens:ttf', + context, + tracker_flag_data, + 100 + ) + tracker.track_time_to_first_token(100) + expect(tracker.summary.time_to_first_token).to eq(100) + end + end + + describe '#track_tokens' do + it 'tracks token usage' do + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:total', + context, + tracker_flag_data, + 300 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:input', + context, + tracker_flag_data, + 200 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:output', + context, + tracker_flag_data, + 100 + ) + tokens = LaunchDarkly::Server::AI::TokenUsage.new(total: 300, input: 200, output: 100) + tracker.track_tokens(tokens) + expect(tracker.summary.usage).to eq(tokens) + end + end + + describe '#track_bedrock_metrics' do + let(:bedrock_result) do + { + usage: { + total_tokens: 300, + input_tokens: 200, + output_tokens: 100, + }, + } + end + + it 'tracks duration and tokens' do + expect(ld_client).to receive(:track).with( + '$ld:ai:generation', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation:success', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:duration:total', + context, + tracker_flag_data, + kind_of(Integer) + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:total', + context, + tracker_flag_data, + 300 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:input', + context, + tracker_flag_data, + 200 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:output', + context, + tracker_flag_data, + 100 + ) + + result = tracker.track_bedrock_converse_metrics { bedrock_result } + expect(result).to eq(bedrock_result) + expect(tracker.summary).to be_a(LaunchDarkly::Server::AI::MetricSummary) + expect(tracker.summary.usage).to be_a(LaunchDarkly::Server::AI::TokenUsage) + expect(tracker.summary.usage.total).to eq(300) + 
expect(tracker.summary.usage.input).to eq(200) + expect(tracker.summary.usage.output).to eq(100) + expect(tracker.summary.duration).to be_a(Integer) + expect(tracker.summary.duration).to be >= 0 + expect(tracker.summary.success).to be true + end + + it 'tracks error for failed operation' do + expect(ld_client).to receive(:track).with( + '$ld:ai:generation', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation:error', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:duration:total', + context, + tracker_flag_data, + kind_of(Integer) + ) + + expect { tracker.track_bedrock_converse_metrics { raise 'test error' } }.to raise_error('test error') + expect(tracker.summary.usage).to be_nil + expect(tracker.summary.duration).to be_a(Integer) + expect(tracker.summary.duration).to be >= 0 + expect(tracker.summary.success).to be false + end + end + + describe '#track_openai_metrics' do + let(:openai_result) do + { + usage: { + total_tokens: 300, + prompt_tokens: 200, + completion_tokens: 100, + }, + } + end + + it 'tracks duration and tokens for successful operation' do + expect(ld_client).to receive(:track).with( + '$ld:ai:duration:total', + context, + tracker_flag_data, + kind_of(Integer) + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:total', + context, + tracker_flag_data, + 300 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:input', + context, + tracker_flag_data, + 200 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:tokens:output', + context, + tracker_flag_data, + 100 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation:success', + context, + tracker_flag_data, + 1 + ) + + result = tracker.track_openai_metrics { openai_result } + expect(result).to eq(openai_result) + expect(tracker.summary.usage.total).to 
eq(300) + expect(tracker.summary.usage.input).to eq(200) + expect(tracker.summary.usage.output).to eq(100) + expect(tracker.summary.duration).to be_a(Integer) + expect(tracker.summary.duration).to be >= 0 + expect(tracker.summary.success).to be true + end + + it 'tracks error for failed operation' do + expect(ld_client).to receive(:track).with( + '$ld:ai:duration:total', + context, + tracker_flag_data, + kind_of(Integer) + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation:error', + context, + tracker_flag_data, + 1 + ) + + expect { tracker.track_openai_metrics { raise 'test error' } }.to raise_error('test error') + expect(tracker.summary.usage).to be_nil + expect(tracker.summary.duration).to be_a(Integer) + expect(tracker.summary.duration).to be >= 0 + expect(tracker.summary.success).to be false + end + end + + describe '#track_feedback' do + it 'tracks positive feedback' do + expect(ld_client).to receive(:track).with( + '$ld:ai:feedback:user:positive', + context, + tracker_flag_data, + 1 + ) + tracker.track_feedback(kind: :positive) + end + + it 'tracks negative feedback' do + expect(ld_client).to receive(:track).with( + '$ld:ai:feedback:user:negative', + context, + tracker_flag_data, + 1 + ) + tracker.track_feedback(kind: :negative) + end + end + + describe '#track_success' do + it 'tracks generation and success events' do + expect(ld_client).to receive(:track).with( + '$ld:ai:generation', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation:success', + context, + tracker_flag_data, + 1 + ) + tracker.track_success + expect(tracker.summary.success).to be true + end + end + + describe '#track_error' do + it 'tracks generation and error events' do + expect(ld_client).to receive(:track).with( + '$ld:ai:generation', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to 
receive(:track).with( + '$ld:ai:generation:error', + context, + tracker_flag_data, + 1 + ) + tracker.track_error + expect(tracker.summary.success).to be false + end + + it 'overwrites success with error if both are tracked' do + expect(ld_client).to receive(:track).with( + '$ld:ai:generation', + context, + tracker_flag_data, + 1 + ).twice + expect(ld_client).to receive(:track).with( + '$ld:ai:generation:success', + context, + tracker_flag_data, + 1 + ) + expect(ld_client).to receive(:track).with( + '$ld:ai:generation:error', + context, + tracker_flag_data, + 1 + ) + + tracker.track_success + expect(tracker.summary.success).to be true + tracker.track_error + expect(tracker.summary.success).to be false + end + end + + describe '#summary' do + it 'returns a summary of tracked metrics' do + tracker.track_duration(100) + tracker.track_feedback(kind: :positive) + tracker.track_tokens(LaunchDarkly::Server::AI::TokenUsage.new(total: 100, input: 50, output: 50)) + tracker.track_success + tracker.track_time_to_first_token(50) + + expect(tracker.summary.duration).to eq(100) + expect(tracker.summary.feedback).to eq(:positive) + expect(tracker.summary.usage.total).to eq(100) + expect(tracker.summary.usage.input).to eq(50) + expect(tracker.summary.usage.output).to eq(50) + expect(tracker.summary.success).to be true + expect(tracker.summary.time_to_first_token).to eq(50) + end + + it 'returns nil for untracked metrics' do + expect(tracker.summary.duration).to be_nil + expect(tracker.summary.feedback).to be_nil + expect(tracker.summary.usage).to be_nil + expect(tracker.summary.success).to be_nil + expect(tracker.summary.time_to_first_token).to be_nil + end + end +end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb new file mode 100644 index 0000000..7975e16 --- /dev/null +++ b/spec/spec_helper.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +# require 'ldclient-ai' + +RSpec.configure do |config| + # Enable flags like --only-failures and --next-failure + 
config.example_status_persistence_file_path = '.rspec_status' + + # Disable RSpec exposing methods globally on `Module` and `main` + config.disable_monkey_patching! + + config.expect_with :rspec do |c| + c.syntax = :expect + end +end