diff --git a/examples/chatbot/README.md b/examples/chatbot/README.md
new file mode 100644
index 0000000..c86f8a8
--- /dev/null
+++ b/examples/chatbot/README.md
@@ -0,0 +1,46 @@
+# LaunchDarkly sample Ruby AI application
+
+We've built a simple console application that demonstrates how LaunchDarkly's Ruby AI SDK works.
+
+Below, you'll find the build procedure. For more comprehensive instructions, see the [Quickstart page](https://docs.launchdarkly.com/home/ai-configs/quickstart) or the [Ruby reference guide](https://docs.launchdarkly.com/sdk/ai/ruby).
+
+This demo requires Ruby 3.0 or higher.
+
+## Build Instructions
+
+This repository includes examples for `OpenAI` and `Bedrock`. Depending on your preferred provider, you may have to take some additional steps.
+
+### General setup
+
+1. Install the required dependencies with `bundle install` in the appropriate example directory.
+1. Set the environment variable `LAUNCHDARKLY_SDK_KEY` to your LaunchDarkly SDK key. If there is an existing AI Config in your LaunchDarkly project that you want to evaluate, set `LAUNCHDARKLY_AI_CONFIG_KEY` to its key; otherwise, the AI Config key `sample-ai-config` is assumed.
+
+   ```bash
+   export LAUNCHDARKLY_SDK_KEY="1234567890abcdef"
+   export LAUNCHDARKLY_AI_CONFIG_KEY="sample-ai-config"
+   ```
+
+1. Replace `my-default-model` with your preferred model if the application cannot connect to LaunchDarkly services; the fallback configuration the examples use is sketched below.
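+
+   The fallback configuration is defined as `DEFAULT_VALUE` in each example and is used as the default when the AI Config cannot be retrieved from LaunchDarkly. Abridged from the example source, it looks roughly like this (swap in whatever model name and prompts suit your application):
+
+   ```ruby
+   DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
+     enabled: true,
+     model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'my-default-model'),
+     messages: [
+       LaunchDarkly::Server::AI::Message.new('system', 'You are a default unhelpful assistant ...'),
+       LaunchDarkly::Server::AI::Message.new('user', '{{user_question}}'),
+     ]
+   )
+   ```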
+
+### OpenAI setup
+
+1. Set the environment variable `OPENAI_API_KEY` to your OpenAI key.
+
+   ```bash
+   export OPENAI_API_KEY="0987654321fedcba"
+   ```
+
+1. Run the program: `bundle exec ruby hello_openai.rb`
+
+### Bedrock setup
+
+1. Ensure the required AWS credentials can be [auto-detected by the AWS client][aws-configuration]. In the provided example, we use the following environment variables. The AWS region defaults to `us-east-1` unless `AWS_REGION` is set.
+
+   ```bash
+   export AWS_ACCESS_KEY_ID="0987654321fedcba"
+   export AWS_SECRET_ACCESS_KEY="0987654321fedcba"
+   ```
+
+1. Run the program: `bundle exec ruby hello_bedrock.rb`
+
+[aws-configuration]: https://docs.aws.amazon.com/sdk-for-ruby/v3/developer-guide/configuring.html#precedence-settings
\ No newline at end of file
diff --git a/examples/chatbot/aws-bedrock/Gemfile b/examples/chatbot/aws-bedrock/Gemfile
new file mode 100644
index 0000000..b97f8b4
--- /dev/null
+++ b/examples/chatbot/aws-bedrock/Gemfile
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+source 'https://rubygems.org'
+
+ruby '>= 3.0.0'
+
+gem 'aws-sdk-bedrockruntime', '~> 1.0'
+gem 'launchdarkly-server-sdk-ai', path: '../../..'
diff --git a/examples/chatbot/aws-bedrock/hello_bedrock.rb b/examples/chatbot/aws-bedrock/hello_bedrock.rb
new file mode 100644
index 0000000..ffe6527
--- /dev/null
+++ b/examples/chatbot/aws-bedrock/hello_bedrock.rb
@@ -0,0 +1,144 @@
+# frozen_string_literal: true
+
+require 'aws-sdk-bedrockruntime'
+require 'launchdarkly-server-sdk'
+require 'launchdarkly-server-sdk-ai'
+
+# Set sdk_key to your LaunchDarkly SDK key.
+sdk_key = ENV['LAUNCHDARKLY_SDK_KEY']
+
+# Set ai_config_key to the AI Config key you want to evaluate.
+ai_config_key = ENV['LAUNCHDARKLY_AI_CONFIG_KEY'] || 'sample-ai-config'
+
+# Set aws_access_key_id and aws_secret_access_key for AWS credentials.
+aws_access_key_id = ENV['AWS_ACCESS_KEY_ID']
+aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY']
+region = ENV['AWS_REGION'] || 'us-east-1'
+
+if sdk_key.nil? || sdk_key.empty?
+  puts '*** Please set the LAUNCHDARKLY_SDK_KEY env variable first'
+  exit 1
+end
+
+if aws_access_key_id.nil? || aws_access_key_id.empty?
+  puts '*** Please set the AWS_ACCESS_KEY_ID env variable first'
+  exit 1
+end
+
+if aws_secret_access_key.nil? || aws_secret_access_key.empty?
+  puts '*** Please set the AWS_SECRET_ACCESS_KEY env variable first'
+  exit 1
+end
+
+#
+# Chatbot class that interacts with LaunchDarkly AI and AWS Bedrock
+#
+class BedrockChatbot
+  attr_reader :aiclient, :ai_config_key, :bedrock_client
+
+  DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
+    enabled: true,
+    model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'my-default-model'),
+    messages: [
+      LaunchDarkly::Server::AI::Message.new('system',
+                                            'You are a default unhelpful assistant with the persona of HAL 9000 talking with {{ldctx.name}}'),
+      LaunchDarkly::Server::AI::Message.new('user', '{{user_question}}'),
+    ]
+  )
+
+  def initialize(aiclient, ai_config_key, bedrock_client, context)
+    @aiclient = aiclient
+    @ai_config_key = ai_config_key
+    @bedrock_client = bedrock_client
+    @context = context
+  end
+
+  def ask_agent(question)
+    ai_config = aiclient.config(
+      @ai_config_key,
+      @context,
+      DEFAULT_VALUE,
+      { user_question: question }
+    )
+
+    begin
+      response = ai_config.tracker.track_bedrock_converse_metrics do
+        @bedrock_client.converse(
+          map_converse_arguments(
+            ai_config.model.name,
+            ai_config.messages
+          )
+        )
+      end
+      [response.output.message.content[0].text, ai_config.tracker]
+    rescue StandardError => e
+      ["An error occurred: #{e.message}", nil]
+    end
+  end
+
+  def agent_was_helpful(tracker, helpful)
+    kind = helpful ? :positive : :negative
+    tracker.track_feedback(kind: kind)
+  end
+
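+  # Map the AI Config messages onto the argument hash expected by the Bedrock
+  # Converse API: user and assistant messages are grouped into :messages, while
+  # system messages are passed separately via the top-level :system parameter.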
+  def map_converse_arguments(model_id, messages)
+    args = {
+      model_id: model_id,
+    }
+
+    mapped_messages = []
+    user_messages = messages.select { |msg| msg.role == 'user' }
+    mapped_messages << { role: 'user', content: user_messages.map { |msg| { text: msg.content } } } unless user_messages.empty?
+
+    assistant_messages = messages.select { |msg| msg.role == 'assistant' }
+    mapped_messages << { role: 'assistant', content: assistant_messages.map { |msg| { text: msg.content } } } unless assistant_messages.empty?
+    args[:messages] = mapped_messages unless mapped_messages.empty?
+
+    system_messages = messages.select { |msg| msg.role == 'system' }
+    args[:system] = system_messages.map { |msg| { text: msg.content } } unless system_messages.empty?
+
+    args
+  end
+end
+
+# Initialize the LaunchDarkly client
+ld_client = LaunchDarkly::LDClient.new(sdk_key)
+ai_client = LaunchDarkly::Server::AI::Client.new(ld_client)
+
+unless ld_client.initialized?
+  puts '*** SDK failed to initialize!'
+  exit 1
+end
+
+# Create the LDContext
+context = LaunchDarkly::LDContext.create({
+                                           key: 'user-key',
+                                           kind: 'user',
+                                           name: 'Lucy',
+                                         })
+
+bedrock_client = Aws::BedrockRuntime::Client.new(
+  aws_access_key_id: aws_access_key_id,
+  aws_secret_access_key: aws_secret_access_key,
+  region: region
+)
+chatbot = BedrockChatbot.new(ai_client, ai_config_key, bedrock_client, context)
+
+loop do
+  print "Ask a question (or type 'exit'): "
+  question = gets&.chomp
+  break if question.nil? || question.strip.downcase == 'exit'
+
+  response, tracker = chatbot.ask_agent(question)
+  puts "AI Response: #{response}"
+
+  next if tracker.nil? # If tracker is nil, skip feedback collection
+
+  print "Was the response helpful? [yes/no] (or type 'exit'): "
+  feedback = gets&.chomp
+  break if feedback.nil? || feedback.strip.downcase == 'exit'
+
+  chatbot.agent_was_helpful(tracker, feedback.strip.downcase == 'yes')
+end
+
+ld_client.close
\ No newline at end of file
diff --git a/examples/hello-bedrock/Gemfile b/examples/chatbot/openai/Gemfile
similarity index 72%
rename from examples/hello-bedrock/Gemfile
rename to examples/chatbot/openai/Gemfile
index 83b1714..a71a5fd 100644
--- a/examples/hello-bedrock/Gemfile
+++ b/examples/chatbot/openai/Gemfile
@@ -2,5 +2,7 @@
 
 source 'https://rubygems.org'
 
-gem 'aws-sdk-bedrockruntime'
+ruby '>= 3.0.0'
+
 gem 'launchdarkly-server-sdk-ai', path: '../../..'
+gem 'openai', '~> 0.7.0'
diff --git a/examples/chatbot/openai/hello_openai.rb b/examples/chatbot/openai/hello_openai.rb
new file mode 100644
index 0000000..9fc6fe6
--- /dev/null
+++ b/examples/chatbot/openai/hello_openai.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'launchdarkly-server-sdk'
+require 'launchdarkly-server-sdk-ai'
+require 'openai'
+
+# Set sdk_key to your LaunchDarkly SDK key.
+sdk_key = ENV['LAUNCHDARKLY_SDK_KEY']
+
+# Set ai_config_key to the AI Config key you want to evaluate.
+ai_config_key = ENV['LAUNCHDARKLY_AI_CONFIG_KEY'] || 'sample-ai-config'
+
+# Set openai_api_key to your OpenAI API key.
+openai_api_key = ENV['OPENAI_API_KEY']
+
+if sdk_key.nil? || sdk_key.empty?
+  puts '*** Please set the LAUNCHDARKLY_SDK_KEY env variable first'
+  exit 1
+end
+
+if openai_api_key.nil? || openai_api_key.empty?
+  puts '*** Please set the OPENAI_API_KEY env variable first'
+  exit 1
+end
+
+#
+# Chatbot class that interacts with LaunchDarkly AI and OpenAI
+#
+class Chatbot
+  attr_reader :aiclient, :ai_config_key, :openai_client, :context
+
+  DEFAULT_VALUE = LaunchDarkly::Server::AI::AIConfig.new(
+    enabled: true,
+    model: LaunchDarkly::Server::AI::ModelConfig.new(name: 'my-default-model'),
+    messages: [
+      LaunchDarkly::Server::AI::Message.new('system',
+                                            'You are a default unhelpful assistant with the persona of HAL 9000 talking with {{ldctx.name}}'),
+      LaunchDarkly::Server::AI::Message.new('user', '{{user_question}}'),
+    ]
+  )
+
+  def initialize(aiclient, ai_config_key, openai_client, context)
+    @aiclient = aiclient
+    @ai_config_key = ai_config_key
+    @openai_client = openai_client
+    @context = context
+  end
+
+  def ask_agent(question)
+    ai_config = aiclient.config(
+      @ai_config_key,
+      @context,
+      DEFAULT_VALUE,
+      { user_question: question }
+    )
+
+    begin
+      completion = ai_config.tracker.track_openai_metrics do
+        @openai_client.chat.completions.create(
+          model: ai_config.model.name,
+          messages: ai_config.messages.map(&:to_h)
+        )
+      end
+      [completion[:choices][0][:message][:content], ai_config.tracker]
+    rescue StandardError => e
+      ["An error occurred: #{e.message}", nil]
+    end
+  end
+
+  def agent_was_helpful(tracker, helpful)
+    kind = helpful ? :positive : :negative
+    tracker.track_feedback(kind: kind)
+  end
+end
+
+ld_client = LaunchDarkly::LDClient.new(sdk_key)
+ai_client = LaunchDarkly::Server::AI::Client.new(ld_client)
+
+unless ld_client.initialized?
+  puts '*** SDK failed to initialize!'
+  exit 1
+end
+
+puts '*** SDK successfully initialized'
+
+# Create the LDContext
+context = LaunchDarkly::LDContext.create({
+                                           key: 'user-key',
+                                           kind: 'user',
+                                           name: 'Lucy',
+                                         })
+
+chatbot = Chatbot.new(ai_client, ai_config_key, OpenAI::Client.new(api_key: openai_api_key), context)
+
+loop do
+  print "Ask a question (or type 'exit'): "
+  input = gets&.chomp
+  break if input.nil? || input.strip.downcase == 'exit'
+
+  response, tracker = chatbot.ask_agent(input)
+  puts "AI Response: #{response}"
+
+  next if tracker.nil? # If tracker is nil, skip feedback collection
+
+  print "Was the response helpful? [yes/no] (or type 'exit'): "
+  feedback = gets&.chomp
+  break if feedback.nil? || feedback.strip.downcase == 'exit'
+
+  helpful = feedback.strip.downcase == 'yes'
+  chatbot.agent_was_helpful(tracker, helpful)
+end
+
+ld_client.close
\ No newline at end of file