48 changes: 24 additions & 24 deletions Gemfile.lock
@@ -1,47 +1,47 @@
GEM
  remote: https://rubygems.org/
  specs:
-    addressable (2.8.6)
-      public_suffix (>= 2.0.2, < 6.0)
-    bigdecimal (3.1.6)
-    connection_pool (2.4.1)
+    addressable (2.8.7)
+      public_suffix (>= 2.0.2, < 7.0)
+    bigdecimal (3.2.2)
+    connection_pool (2.5.3)
     crack (1.0.0)
       bigdecimal
       rexml
-    diff-lcs (1.5.1)
-    faraday (2.12.0)
-      faraday-net_http (>= 2.0, < 3.4)
+    diff-lcs (1.6.2)
+    faraday (2.13.1)
+      faraday-net_http (>= 2.0, < 3.5)
       json
       logger
-    faraday-net_http (3.3.0)
-      net-http
+    faraday-net_http (3.4.0)
+      net-http (>= 0.5.0)
     faraday-net_http_persistent (2.3.0)
       faraday (~> 2.5)
       net-http-persistent (>= 4.0.4, < 5)
-    hashdiff (1.1.0)
-    json (2.7.2)
-    logger (1.6.1)
-    net-http (0.4.1)
+    hashdiff (1.2.0)
+    json (2.12.2)
+    logger (1.7.0)
+    net-http (0.6.0)
       uri
-    net-http-persistent (4.0.4)
-      connection_pool (~> 2.2)
-    public_suffix (5.0.4)
-    rexml (3.2.6)
-    rspec (3.13.0)
+    net-http-persistent (4.0.6)
+      connection_pool (~> 2.2, >= 2.2.4)
+    public_suffix (6.0.2)
+    rexml (3.4.1)
+    rspec (3.13.1)
       rspec-core (~> 3.13.0)
       rspec-expectations (~> 3.13.0)
       rspec-mocks (~> 3.13.0)
-    rspec-core (3.13.0)
+    rspec-core (3.13.4)
       rspec-support (~> 3.13.0)
-    rspec-expectations (3.13.0)
+    rspec-expectations (3.13.5)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-mocks (3.13.0)
+    rspec-mocks (3.13.5)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-support (3.13.0)
-    uri (0.13.1)
-    webmock (3.22.0)
+    rspec-support (3.13.4)
+    uri (1.0.3)
+    webmock (3.25.1)
       addressable (>= 2.8.0)
       crack (>= 0.3.2)
       hashdiff (>= 0.4.0, < 2.0.0)
20 changes: 20 additions & 0 deletions lib/rubyai.rb
@@ -2,11 +2,31 @@
require 'faraday/net_http_persistent'
require 'json'

require_relative "rubyai/providers/openai"
require_relative "rubyai/providers/anthropic"
require_relative "rubyai/provider"
require_relative "rubyai/client"
require_relative "rubyai/configuration"
require_relative "rubyai/http"
require_relative "rubyai/chat"
require_relative "rubyai/version"

module RubyAI
class Error < StandardError; end

def self.models
Configuration::MODELS
end

def self.chat(config = {})
Client.new(config).call
end

def self.configure
yield config
end

def self.config(config = {})
@config ||= Configuration.new(config)
end
end
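
For context, the intended top-level usage with these new helpers looks roughly like this — a sketch only; the accessor names come from the configuration.rb diff below, and the env var is a placeholder:

require "rubyai"

RubyAI.configure do |c|
  c.api_key  = ENV["OPENAI_API_KEY"]   # placeholder key source
  c.provider = "openai"
  c.model    = "gpt-4o-mini"
  c.messages = "Hello, how are you?"
end

response = RubyAI.chat   # builds a Client from the memoized config and performs the request
puts response.dig("choices", 0, "message", "content")   # assuming an OpenAI-style response hash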
41 changes: 41 additions & 0 deletions lib/rubyai/chat.rb
@@ -0,0 +1,41 @@
module RubyAI
class Chat
attr_accessor :provider, :model, :temperature

def initialize(provider, model: nil, temperature: 0.7)
@provider = provider
@model = model || RubyAI::Configuration::DEFAULT_MODEL
@temperature = temperature
end

def call(messages)
raise ArgumentError, "Messages cannot be empty" if messages.nil? || messages.empty?

body = HTTP.build_body(messages, @provider, @model, @temperature)
headers = HTTP.build_headers(provider, RubyAI.config)

response = connection.post do |req|
req.url Configuration::PROVIDERS[@provider] || Configuration::BASE_URL
req.headers.merge!(headers)
req.body = body.to_json
Review comment: Faraday can manage JSON automatically. (See the connection sketch after this file.)

Author: The same as in the previous message.

end

JSON.parse(response.body)
end

private

def connection
@connection ||= Faraday.new do |faraday|
faraday.adapter Faraday.default_adapter
Review comment: The adapter should come last. (See the connection sketch after this file.)

Author: It's just a copy-paste from the client, so I didn't know it should come last.

faraday.headers['Content-Type'] = 'application/json'
end
rescue Faraday::Error => e
raise "Connection error: #{e.message}"
rescue JSON::ParserError => e
raise "Response parsing error: #{e.message}"
rescue StandardError => e
raise "An unexpected error occurred: #{e.message}"
end
end
end
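
Putting the two review notes above together, the connection could be set up so Faraday handles JSON and the adapter is registered last — a sketch, not part of this PR:

def connection
  @connection ||= Faraday.new(url: Configuration::PROVIDERS[@provider] || Configuration::BASE_URL) do |faraday|
    faraday.request :json    # serializes request bodies, so `req.body = body` without calling to_json
    faraday.response :json   # parses JSON responses, so the manual JSON.parse becomes unnecessary
    faraday.adapter Faraday.default_adapter   # adapter declared last, after the middleware
  end
end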
10 changes: 5 additions & 5 deletions lib/rubyai/client.rb
@@ -2,15 +2,15 @@ module RubyAI
class Client
attr_reader :configuration

def initialize(config_hash = {})
@configuration = Configuration.new(config_hash)
def initialize(config = {})
@configuration ||= RubyAI.config(config)
end

def call
response = connection.post do |req|
req.url Configuration::BASE_URL
req.headers.merge!(HTTP.build_headers(configuration.api_key))
req.body = HTTP.build_body(configuration.messages, configuration.model, configuration.temperature).to_json
req.url Configuration::PROVIDERS[configuration.provider] || Configuration::BASE_URL
req.headers.merge!(HTTP.build_headers(configuration.provider || RubyAI::Configuration::DEFAULT_PROVIDER, RubyAI.config ))
req.body = HTTP.build_body(configuration.messages, configuration.provider, configuration.model, configuration.temperature).to_json
end

JSON.parse(response.body)
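For reference, a call matching the new signature (all values are placeholders). Note that `@configuration ||= RubyAI.config(config)` memoizes at the module level, so the first configuration created in a process is reused by later clients:

client = RubyAI::Client.new(
  api_key:  ENV["OPENAI_API_KEY"],   # placeholder
  provider: "openai",
  model:    "gpt-4o-mini",
  messages: "Summarize this diff in one sentence."
)
result = client.call   # parsed JSON response as a Hash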
37 changes: 24 additions & 13 deletions lib/rubyai/configuration.rb
@@ -1,31 +1,42 @@
module RubyAI
class Configuration
BASE_URL = "https://api.openai.com/v1/chat/completions"
PROVIDERS = {
'openai' => "https://api.openai.com/v1/chat/completions",
'anthropic' => "https://api.anthropic.com/v1/chat/completions"
}.freeze

MODELS = PROVIDERS.to_h do |provider, _url|
[provider, Provider[provider].models]
end.freeze

MODELS = {
"gpt-4" => "gpt-4",
"gpt-4-32k" => "gpt-4-32k",
"gpt-4-turbo" => "gpt-4-turbo",
"gpt-4o-mini" => "gpt-4o-mini",
"o1-mini" => "o1-mini",
"o1-preview" => "o1-preview",
"text-davinci-003" => "text-davinci-003"
}

BASE_URL = "https://api.openai.com/v1/chat/completions"

DEFAULT_MODEL = "gpt-3.5-turbo"

attr_accessor :api_key, :model, :messages, :temperature
DEFAULT_PROVIDER = 'openai'

# default values for configuration
attr_accessor :api_key,
:model,
:messages,
:temperature,
:provider,
# :providers
:anthropic_api_key

def initialize(config = {})
@api_key = config[:api_key]
@api_key = config.fetch(:api_key, openai_api_key)
@openai_api_key = config.fetch(:openai_api_key, api_key)
@model = config.fetch(:model, DEFAULT_MODEL)
@messages = config.fetch(:messages, nil)
@temperature = config.fetch(:temperature, 0.7)
@provider = config.fetch(:provider, "openai")
end
end

def self.configuration
@configuration ||= Configuration.new
@configuration ||= RubyAI.config(config = {})
end

def self.configure
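With MODELS now derived from the provider classes (which the require order in lib/rubyai.rb loads before Configuration), lookups are nested by provider — roughly:

RubyAI::Configuration::PROVIDERS['anthropic']
# => "https://api.anthropic.com/v1/chat/completions"

RubyAI::Configuration::MODELS.keys
# => ["openai", "anthropic"]

RubyAI::Configuration::MODELS['openai']['gpt-4']
# => "gpt-4"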
46 changes: 38 additions & 8 deletions lib/rubyai/http.rb
@@ -2,19 +2,49 @@ module RubyAI
module HTTP
extend self

def build_body(messages, model, temperature)
{
'model': Configuration::MODELS[model],
def build_body(messages, provider, model, temperature)
case provider
when 'openai'
{
'model': Configuration::MODELS[provider][model],
'messages': [{ "role": "user", "content": messages }],
'temperature': temperature
}
}
when 'anthropic'
{
'model' => Configuration::MODELS[provider][model],
'max_tokens' => 1024, # Required parameter for Anthropic API
Review comment: Why stay with 1024 tokens? Can't we increase this?

Author (@Reion19, Jul 2, 2025): My bad, this will be fixed in future commits, because in one of the next PRs I've changed the provider configuration. I simply forgot about this.

'messages' => format_messages_for_antropic(messages),
'temperature' => temperature
}
end
end

def build_headers(api_key)
{
def build_headers(provider, config)
case provider
when 'openai'
{
'Content-Type': 'application/json',
Review comment: Faraday will add this header automatically.

Author: This line will be removed in future commits.

'Authorization': "Bearer #{api_key}"
}
'Authorization': "Bearer #{config.openai_api_key}"
}
when 'anthropic'
{
'x-api-key' => config.anthropic_api_key,
'anthropic-version' => '2023-06-01'
}
end
end

private

def format_messages_for_antropic(messages)
# Messages should be an array of message objects
# Each message needs 'role' (either 'user' or 'assistant') and 'content'
if messages.is_a?(String)
[{ 'role' => 'user', 'content' => messages }]
else
messages
end
end
end
end
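
To illustrate the provider branching (assuming the constants above resolve as written; 1024 is the hard-coded limit discussed in the thread):

RubyAI::HTTP.build_body("Hi there", "anthropic", "claude-2", 0.5)
# => { "model" => "claude-2", "max_tokens" => 1024,
#      "messages" => [{ "role" => "user", "content" => "Hi there" }],
#      "temperature" => 0.5 }

RubyAI::HTTP.build_body("Hi there", "openai", "gpt-4", 0.5)
# => { model: "gpt-4",
#      messages: [{ role: "user", content: "Hi there" }],
#      temperature: 0.5 }

The two branches also differ in key style (symbols for OpenAI, strings for Anthropic), which may be worth unifying.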
15 changes: 15 additions & 0 deletions lib/rubyai/provider.rb
@@ -0,0 +1,15 @@
module RubyAI
module Provider
PROVIDERS = {
'openai' => RubyAI::Providers::OpenAI,
# not tested yet because I don't have an Anthropic API key
'anthropic' => RubyAI::Providers::Anthropic
}

module_function

def [](provider)
PROVIDERS.fetch(provider)
end
end
end
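
The registry is the single lookup point for provider classes; because it uses fetch, an unknown provider raises KeyError instead of silently returning nil:

RubyAI::Provider['openai']                        # => RubyAI::Providers::OpenAI
RubyAI::Provider['openai'].models.key?("gpt-4")   # => true
RubyAI::Provider['mistral']                       # raises KeyError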
17 changes: 17 additions & 0 deletions lib/rubyai/providers/anthropic.rb
@@ -0,0 +1,17 @@
module RubyAI
module Providers
# not tested yet because I don't have an Anthropic API key
class Anthropic
def self.models = {
"claude-2" => "claude-2",
"claude-instant-100k" => "claude-instant-100k",
"claude-1" => "claude-1",
"claude-1.3" => "claude-1.3",
"claude-1.3-sonnet" => "claude-1.3-sonnet",
"claude-1.3-sonnet-100k" => "claude-1.3-sonnet-100k"
}.freeze
end

# TODO: configuration of separate models
end
end
20 changes: 20 additions & 0 deletions lib/rubyai/providers/openai.rb
@@ -0,0 +1,20 @@
module RubyAI
module Providers
class OpenAI
DEFAULT_MODEL = "gpt-3.5-turbo".freeze

def self.models
{"gpt-3.5-turbo" => "gpt-3.5-turbo",
"gpt-4" => "gpt-4",
"gpt-4-32k" => "gpt-4-32k",
"gpt-4-turbo" => "gpt-4-turbo",
"gpt-4o-mini" => "gpt-4o-mini",
"o1-mini" => "o1-mini",
"o1-preview" => "o1-preview",
"text-davinci-003" => "text-davinci-003" }
end

# TODO: configuration of separate models
end
end
end
33 changes: 0 additions & 33 deletions spec/configuration_spec.rb

This file was deleted.

2 changes: 2 additions & 0 deletions spec/rubyai/chat_spec.rb
@@ -0,0 +1,2 @@
require_relative '../../lib/rubyai/chat'
require 'webmock/rspec'
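
The new spec file currently contains only the requires. A possible first example, using WebMock and assuming the whole gem is loaded so Configuration and HTTP are available (and that the config exposes the openai_api_key reader the initializer expects):

require_relative '../../lib/rubyai'
require 'webmock/rspec'

RSpec.describe RubyAI::Chat do
  let(:chat) { described_class.new('openai', model: 'gpt-4') }
  let(:response_body) { { 'choices' => [{ 'message' => { 'content' => 'Hi!' } }] } }

  before do
    # stub the OpenAI endpoint defined in Configuration::PROVIDERS
    stub_request(:post, RubyAI::Configuration::PROVIDERS['openai'])
      .to_return(status: 200, body: response_body.to_json, headers: { 'Content-Type' => 'application/json' })
  end

  it 'posts the message and returns the parsed response' do
    expect(chat.call('Hello')).to eq(response_body)
  end
end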
5 changes: 3 additions & 2 deletions spec/client_spec.rb → spec/rubyai/client_spec.rb
@@ -1,12 +1,13 @@
require 'webmock/rspec'
require_relative '../lib/rubyai/client.rb'
require_relative '../../lib/rubyai/client.rb'

RSpec.describe RubyAI::Client do
let(:api_key) { 'your_api_key' }
let(:messages) { 'Hello, how are you?' }
let(:temperature) { 0.7 }
let(:model) { 'gpt-3.5-turbo' }
let(:client) { described_class.new(api_key: api_key, messages: messages, temperature: temperature, model: model) }
let(:provider) { 'openai' }
let(:client) { described_class.new(api_key: api_key, messages: messages, temperature: temperature, provider: provider, model: model) }

describe '#call' do
let(:response_body) { { 'completion' => 'This is a response from the model.' } }