Skip to content
This repository was archived by the owner on Apr 1, 2025. It is now read-only.

Commit d3c4ba9

Browse files
🌿 Fern Regeneration -- July 10, 2024 (#41)
1 parent 64410b4 commit d3c4ba9

File tree

4 files changed

+8
-12
lines changed

4 files changed

+8
-12
lines changed

lib/assemblyai/lemur/client.rb

Lines changed: 0 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,6 @@ def initialize(request_client:)
3333
# Use either transcript_ids or input_text as input into LeMUR.
3434
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
3535
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
36-
# Defaults to "default".
3736
# @param max_output_size [Integer] Max output size in tokens, up to 4000
3837
# @param temperature [Float] The temperature to use for the model.
3938
# Higher values result in answers that are more creative, lower values are more
@@ -84,7 +83,6 @@ def task(prompt:, transcript_ids: nil, input_text: nil, context: nil, final_mode
8483
# Use either transcript_ids or input_text as input into LeMUR.
8584
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
8685
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
87-
# Defaults to "default".
8886
# @param max_output_size [Integer] Max output size in tokens, up to 4000
8987
# @param temperature [Float] The temperature to use for the model.
9088
# Higher values result in answers that are more creative, lower values are more
@@ -136,7 +134,6 @@ def summary(transcript_ids: nil, input_text: nil, context: nil, final_model: nil
136134
# Use either transcript_ids or input_text as input into LeMUR.
137135
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
138136
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
139-
# Defaults to "default".
140137
# @param max_output_size [Integer] Max output size in tokens, up to 4000
141138
# @param temperature [Float] The temperature to use for the model.
142139
# Higher values result in answers that are more creative, lower values are more
@@ -187,7 +184,6 @@ def question_answer(questions:, transcript_ids: nil, input_text: nil, context: n
187184
# Use either transcript_ids or input_text as input into LeMUR.
188185
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
189186
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
190-
# Defaults to "default".
191187
# @param max_output_size [Integer] Max output size in tokens, up to 4000
192188
# @param temperature [Float] The temperature to use for the model.
193189
# Higher values result in answers that are more creative, lower values are more
@@ -294,7 +290,6 @@ def initialize(request_client:)
294290
# Use either transcript_ids or input_text as input into LeMUR.
295291
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
296292
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
297-
# Defaults to "default".
298293
# @param max_output_size [Integer] Max output size in tokens, up to 4000
299294
# @param temperature [Float] The temperature to use for the model.
300295
# Higher values result in answers that are more creative, lower values are more
@@ -347,7 +342,6 @@ def task(prompt:, transcript_ids: nil, input_text: nil, context: nil, final_mode
347342
# Use either transcript_ids or input_text as input into LeMUR.
348343
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
349344
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
350-
# Defaults to "default".
351345
# @param max_output_size [Integer] Max output size in tokens, up to 4000
352346
# @param temperature [Float] The temperature to use for the model.
353347
# Higher values result in answers that are more creative, lower values are more
@@ -401,7 +395,6 @@ def summary(transcript_ids: nil, input_text: nil, context: nil, final_model: nil
401395
# Use either transcript_ids or input_text as input into LeMUR.
402396
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
403397
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
404-
# Defaults to "default".
405398
# @param max_output_size [Integer] Max output size in tokens, up to 4000
406399
# @param temperature [Float] The temperature to use for the model.
407400
# Higher values result in answers that are more creative, lower values are more
@@ -454,7 +447,6 @@ def question_answer(questions:, transcript_ids: nil, input_text: nil, context: n
454447
# Use either transcript_ids or input_text as input into LeMUR.
455448
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
456449
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
457-
# Defaults to "default".
458450
# @param max_output_size [Integer] Max output size in tokens, up to 4000
459451
# @param temperature [Float] The temperature to use for the model.
460452
# Higher values result in answers that are more creative, lower values are more

lib/assemblyai/lemur/types/lemur_base_params.rb

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,6 @@ class LemurBaseParams
1919
# @return [AssemblyAI::Lemur::LemurBaseParamsContext] Context to provide the model. This can be a string or a free-form JSON value.
2020
attr_reader :context
2121
# @return [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
22-
# Defaults to "default".
2322
attr_reader :final_model
2423
# @return [Integer] Max output size in tokens, up to 4000
2524
attr_reader :max_output_size
@@ -44,7 +43,6 @@ class LemurBaseParams
4443
# Use either transcript_ids or input_text as input into LeMUR.
4544
# @param context [AssemblyAI::Lemur::LemurBaseParamsContext] Context to provide the model. This can be a string or a free-form JSON value.
4645
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
47-
# Defaults to "default".
4846
# @param max_output_size [Integer] Max output size in tokens, up to 4000
4947
# @param temperature [Float] The temperature to use for the model.
5048
# Higher values result in answers that are more creative, lower values are more

lib/assemblyai/lemur/types/lemur_model.rb

Lines changed: 7 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,10 +4,16 @@ module AssemblyAI
44
class Lemur
55
# The model that is used for the final prompt after compression is performed.
66
class LemurModel
7+
ANTHROPIC_CLAUDE3_5_SONNET = "anthropic/claude-3-5-sonnet"
8+
ANTHROPIC_CLAUDE3_OPUS = "anthropic/claude-3-opus"
9+
ANTHROPIC_CLAUDE3_HAIKU = "anthropic/claude-3-haiku"
10+
ANTHROPIC_CLAUDE3_SONNET = "anthropic/claude-3-sonnet"
11+
ANTHROPIC_CLAUDE2_1 = "anthropic/claude-2-1"
12+
ANTHROPIC_CLAUDE2 = "anthropic/claude-2"
713
DEFAULT = "default"
14+
ANTHROPIC_CLAUDE_INSTANT1_2 = "anthropic/claude-instant-1-2"
815
BASIC = "basic"
916
ASSEMBLYAI_MISTRAL7B = "assemblyai/mistral-7b"
10-
ANTHROPIC_CLAUDE2_1 = "anthropic/claude-2-1"
1117
end
1218
end
1319
end

lib/gemconfig.rb

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -2,7 +2,7 @@
22

33
module AssemblyAI
44
module Gemconfig
5-
VERSION = "1.0.0-beta.14"
5+
VERSION = "1.0.0-beta.15"
66
AUTHORS = [""].freeze
77
EMAIL = "support@assemblyai.com"
88
SUMMARY = "AssemblyAI Ruby SDK"

0 commit comments

Comments (0)