diff --git a/.release-please-manifest.json b/.release-please-manifest.json index e613b816..aff3ead3 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-beta.1" + ".": "0.1.0-beta.2" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index d00e2bb3..2614f4ca 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 99 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-161ca7f1cfd7b33c1fc07d0ce25dfe4be5a7271c394f4cb526b7fb21b0729900.yml -openapi_spec_hash: 602e14add4bee018c6774e320ce309b8 -config_hash: bdacc55eb995c15255ec82130eb8c3bb +configured_endpoints: 109 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-fc64d7c2c8f51f750813375356c3f3fdfc7fc1b1b34f19c20a5410279d445d37.yml +openapi_spec_hash: 618285fc70199ee32b9ebe4bf72f7e4c +config_hash: c497f6b750cc89c0bf2eefc0bc839c70 diff --git a/CHANGELOG.md b/CHANGELOG.md index 74f71588..05abf4ff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,41 @@ # Changelog +## 0.1.0-beta.2 (2025-05-22) + +Full Changelog: [v0.1.0-beta.1...v0.1.0-beta.2](https://github.com/openai/openai-ruby/compare/v0.1.0-beta.1...v0.1.0-beta.2) + +### Features + +* **api:** add container endpoint ([8be52a2](https://github.com/openai/openai-ruby/commit/8be52a2bd618da97c79cb35ada46717965664a08)) +* **api:** further updates for evals API ([ae7a8b8](https://github.com/openai/openai-ruby/commit/ae7a8b8fc1611aa6f645c75f865d9ae6906d9a20)) +* **api:** new API tools ([9105b8b](https://github.com/openai/openai-ruby/commit/9105b8b80d2d381ed58b2b92ecfe644e7596c9a3)) +* **api:** new streaming helpers for background responses ([91a278e](https://github.com/openai/openai-ruby/commit/91a278e6ac4db19c66a89d5f610c22ad3c82a1f7)) +* **api:** Updating Assistants and Evals API schemas ([690b6a7](https://github.com/openai/openai-ruby/commit/690b6a78de30845f974695d0cc36a59a04adf65b)) +* 
RBI type defs for structured output ([#684](https://github.com/openai/openai-ruby/issues/684)) ([00b25bd](https://github.com/openai/openai-ruby/commit/00b25bdb3aa8a2999114389d699cc3dc59c4089e)) + + +### Bug Fixes + +* correctly instantiate sorbet type aliases for enums and unions ([15a2b2b](https://github.com/openai/openai-ruby/commit/15a2b2bab52948f9dac83560dea419006589bd81)) +* structured output union decorations ([05b69d1](https://github.com/openai/openai-ruby/commit/05b69d1be85f813e1bddf04e4042665383c1be04)) + + +### Chores + +* disable sorbet typecheck for WIP sorbet annotations in examples ([#678](https://github.com/openai/openai-ruby/issues/678)) ([a340356](https://github.com/openai/openai-ruby/commit/a3403566253a74a9f1c69a874568000eca1da656)) +* **docs:** grammar improvements ([c4ef024](https://github.com/openai/openai-ruby/commit/c4ef024f3513e1d64e55960b45660e50d9bf9039)) +* force utf-8 locale via `RUBYOPT` when formatting ([746abf4](https://github.com/openai/openai-ruby/commit/746abf447c01290ad3061ef77c54d3b5d781a6b7)) +* **internal:** version bump ([b35ea63](https://github.com/openai/openai-ruby/commit/b35ea63d9758c4e96dd665013be2edb78ebaa8e6)) +* refine Yard and Sorbet types and ensure linting is turned on for examples ([a16dd00](https://github.com/openai/openai-ruby/commit/a16dd00f99176184da0710a0fbce652718a3d067)) +* use fully qualified names for yard annotations and rbs aliases ([26db76d](https://github.com/openai/openai-ruby/commit/26db76de24d82ebb593997fab8fd8df43c5f2372)) +* use sorbet union aliases where available ([600f499](https://github.com/openai/openai-ruby/commit/600f499dcf61b4d3c3a8cf092ff18cb712711dc0)) + + +### Documentation + +* grammar improvements ([15511fc](https://github.com/openai/openai-ruby/commit/15511fc1e80f61abe64375b0a7eb22c5447d5288)) +* grammar improvements in README.md ([d43db56](https://github.com/openai/openai-ruby/commit/d43db56ba239f91c6fb1344156e88feaee802f0c)) + ## 0.1.0-beta.1 (2025-05-16) Full Changelog: 
[v0.1.0-alpha.5...v0.1.0-beta.1](https://github.com/openai/openai-ruby/compare/v0.1.0-alpha.5...v0.1.0-beta.1) diff --git a/Gemfile.lock b/Gemfile.lock index aa8e1840..30d820b9 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . specs: - openai (0.1.0.pre.beta.1) + openai (0.1.0.pre.beta.2) connection_pool GEM diff --git a/README.md b/README.md index a9da861e..e2e3ace6 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ puts(chat_completion) We provide support for streaming responses using Server-Sent Events (SSE). -**coming soon:** `openai.chat.completions.stream` will soon come with Python SDK style higher level streaming responses support. +**coming soon:** `openai.chat.completions.stream` will soon come with Python SDK-style higher-level streaming responses support. ```ruby stream = openai.chat.completions.stream_raw( @@ -224,7 +224,7 @@ puts(chat_completion[:my_undocumented_property]) #### Undocumented request params -If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a request as seen in examples above. +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a request, as seen in the examples above. #### Undocumented endpoints @@ -242,7 +242,7 @@ response = client.request( ### Concurrency & connection pooling -The `OpenAI::Client` instances are threadsafe, but only are fork-safe when there are no in-flight HTTP requests. +The `OpenAI::Client` instances are threadsafe, but are only fork-safe when there are no in-flight HTTP requests. Each instance of `OpenAI::Client` has its own HTTP connection pool with a default size of 99. As such, we recommend instantiating the client once per application in most settings. 
@@ -252,7 +252,7 @@ Unless otherwise specified, other classes in the SDK do not have locks protectin ## Sorbet -This library provides comprehensive [RBI](https://sorbet.org/docs/rbi) definitions, and has no dependency on sorbet-runtime. +This library provides comprehensive [RBI](https://sorbet.org/docs/rbi) definitions and has no dependency on sorbet-runtime. You can provide typesafe request parameters like so: @@ -301,7 +301,7 @@ openai.chat.completions.create( # … ) -# Literal values is also permissible: +# Literal values are also permissible: openai.chat.completions.create( reasoning_effort: :low, # … diff --git a/Rakefile b/Rakefile index fe1523b8..bc850886 100644 --- a/Rakefile +++ b/Rakefile @@ -9,6 +9,7 @@ require "rake/clean" require "rubocop/rake_task" tapioca = "sorbet/tapioca" +examples = "examples" ignore_file = ".ignore" CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file) @@ -35,11 +36,11 @@ multitask(:test) do end xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --] -locale = {"LC_ALL" => "C.UTF-8"} +ruby_opt = {"RUBYOPT" => [ENV["RUBYOPT"], "--encoding=UTF-8"].compact.join(" ")} desc("Lint `*.rb(i)`") multitask(:"lint:rubocop") do - find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0] + find = %w[find ./lib ./test ./rbi ./examples -type f -and ( -name *.rb -or -name *.rbi ) -print0] rubocop = %w[rubocop] rubocop += %w[--format github] if ENV.key?("CI") @@ -54,7 +55,7 @@ end desc("Format `*.rb`") multitask(:"format:rb") do # while `syntax_tree` is much faster than `rubocop`, `rubocop` is the only formatter with full syntax support - find = %w[find ./lib ./test -type f -and -name *.rb -print0] + find = %w[find ./lib ./test ./examples -type f -and -name *.rb -print0] fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --] sh("#{find.shelljoin} | #{fmt.shelljoin}") end @@ -63,7 +64,7 @@ desc("Format `*.rbi`") multitask(:"format:rbi") do find = %w[find 
./rbi -type f -and -name *.rbi -print0] fmt = xargs + %w[stree write --] - sh(locale, "#{find.shelljoin} | #{fmt.shelljoin}") + sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}") end desc("Format `*.rbs`") @@ -99,7 +100,7 @@ multitask(:"format:rbs") do # transform class aliases to type aliases, which syntax tree has no trouble with sh("#{find.shelljoin} | #{pre.shelljoin}") # run syntax tree to format `*.rbs` files - sh(locale, "#{find.shelljoin} | #{fmt.shelljoin}") do + sh(ruby_opt, "#{find.shelljoin} | #{fmt.shelljoin}") do success = _1 end # transform type aliases back to class aliases @@ -117,12 +118,14 @@ multitask(:"typecheck:steep") do sh(*%w[steep check]) end +directory(examples) + desc("Typecheck `*.rbi`") -multitask(:"typecheck:sorbet") do - sh(*%w[srb typecheck]) +multitask("typecheck:sorbet": examples) do + sh(*%w[srb typecheck --dir], examples) end -file(tapioca) do +directory(tapioca) do sh(*%w[tapioca init]) end diff --git a/SECURITY.md b/SECURITY.md index 3b3bd8a6..f04523e4 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -2,7 +2,7 @@ ## Reporting Security Issues -This SDK is generated by [Stainless Software Inc](http://stainless.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken. +This SDK is generated by [Stainless Software Inc](http://stainless.com). Stainless takes security seriously and encourages you to report any security vulnerability promptly so that appropriate action can be taken. To report a security issue, please contact the Stainless team at security@stainless.com. @@ -16,13 +16,13 @@ before making any information public. ## Reporting Non-SDK Related Security Issues If you encounter security issues that are not directly related to SDKs but pertain to the services -or products provided by OpenAI please follow the respective company's security reporting guidelines. 
+or products provided by OpenAI, please follow the respective company's security reporting guidelines. ### OpenAI Terms and Policies Our Security Policy can be found at [Security Policy URL](https://openai.com/policies/coordinated-vulnerability-disclosure-policy). -Please contact disclosure@openai.com for any questions or concerns regarding security of our services. +Please contact disclosure@openai.com for any questions or concerns regarding the security of our services. --- diff --git a/Steepfile b/Steepfile index d7aebca1..528b48c3 100644 --- a/Steepfile +++ b/Steepfile @@ -7,7 +7,7 @@ target(:lib) do signature("sig") - YAML.safe_load_file("./manifest.yaml", symbolize_names: true) => { dependencies: } + YAML.safe_load_file("./manifest.yaml", symbolize_names: true) => {dependencies:} # currently these libraries lack the `*.rbs` annotations required by `steep` stdlibs = dependencies - %w[English etc net/http rbconfig set stringio] diff --git a/examples/structured_outputs_chat_completions.rb b/examples/structured_outputs_chat_completions.rb index 7d52a4b7..f177a208 100755 --- a/examples/structured_outputs_chat_completions.rb +++ b/examples/structured_outputs_chat_completions.rb @@ -29,7 +29,7 @@ class CalendarEvent < OpenAI::BaseModel doc: "Event location" end -# # gets API Key from environment variable `OPENAI_API_KEY` +# gets API Key from environment variable `OPENAI_API_KEY` client = OpenAI::Client.new chat_completion = client.chat.completions.create( diff --git a/lib/openai.rb b/lib/openai.rb index 9f7cc2a1..b5aa2ba6 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -1,7 +1,6 @@ # frozen_string_literal: true # Standard libraries. -# rubocop:disable Lint/RedundantRequireStatement require "English" require "cgi" require "date" @@ -15,8 +14,6 @@ require "stringio" require "time" require "uri" -# rubocop:enable Lint/RedundantRequireStatement - # We already ship the preferred sorbet manifests in the package itself. 
# `tapioca` currently does not offer us a way to opt out of unnecessary compilation. if Object.const_defined?(:Tapioca) && caller.chain([$PROGRAM_NAME]).chain(ARGV).grep(/tapioca/) @@ -182,7 +179,6 @@ require_relative "openai/models/beta/threads/text_delta_block" require_relative "openai/models/beta/thread_stream_event" require_relative "openai/models/beta/thread_update_params" -require_relative "openai/models/beta/truncation_object" require_relative "openai/models/chat/chat_completion" require_relative "openai/models/chat/chat_completion_assistant_message_param" require_relative "openai/models/chat/chat_completion_audio" @@ -225,6 +221,21 @@ require_relative "openai/models/completion_create_params" require_relative "openai/models/completion_usage" require_relative "openai/models/compound_filter" +require_relative "openai/models/container_create_params" +require_relative "openai/models/container_create_response" +require_relative "openai/models/container_delete_params" +require_relative "openai/models/container_list_params" +require_relative "openai/models/container_list_response" +require_relative "openai/models/container_retrieve_params" +require_relative "openai/models/container_retrieve_response" +require_relative "openai/models/containers/file_create_params" +require_relative "openai/models/containers/file_create_response" +require_relative "openai/models/containers/file_delete_params" +require_relative "openai/models/containers/file_list_params" +require_relative "openai/models/containers/file_list_response" +require_relative "openai/models/containers/file_retrieve_params" +require_relative "openai/models/containers/file_retrieve_response" +require_relative "openai/models/containers/files/content_retrieve_params" require_relative "openai/models/create_embedding_response" require_relative "openai/models/embedding" require_relative "openai/models/embedding_create_params" @@ -235,18 +246,13 @@ require_relative "openai/models/eval_custom_data_source_config" 
require_relative "openai/models/eval_delete_params" require_relative "openai/models/eval_delete_response" -require_relative "openai/models/eval_item" require_relative "openai/models/eval_list_params" require_relative "openai/models/eval_list_response" -require_relative "openai/models/eval_logs_data_source_config" require_relative "openai/models/eval_retrieve_params" require_relative "openai/models/eval_retrieve_response" require_relative "openai/models/evals/create_eval_completions_run_data_source" require_relative "openai/models/evals/create_eval_jsonl_run_data_source" -require_relative "openai/models/evals/create_eval_responses_run_data_source" require_relative "openai/models/evals/eval_api_error" -require_relative "openai/models/evals/eval_jsonl_file_content_source" -require_relative "openai/models/evals/eval_jsonl_file_id_source" require_relative "openai/models/evals/run_cancel_params" require_relative "openai/models/evals/run_cancel_response" require_relative "openai/models/evals/run_create_params" @@ -343,6 +349,7 @@ require_relative "openai/models/responses/response_audio_done_event" require_relative "openai/models/responses/response_audio_transcript_delta_event" require_relative "openai/models/responses/response_audio_transcript_done_event" +require_relative "openai/models/responses/response_cancel_params" require_relative "openai/models/responses/response_code_interpreter_call_code_delta_event" require_relative "openai/models/responses/response_code_interpreter_call_code_done_event" require_relative "openai/models/responses/response_code_interpreter_call_completed_event" @@ -373,6 +380,10 @@ require_relative "openai/models/responses/response_function_tool_call_item" require_relative "openai/models/responses/response_function_tool_call_output_item" require_relative "openai/models/responses/response_function_web_search" +require_relative "openai/models/responses/response_image_gen_call_completed_event" +require_relative 
"openai/models/responses/response_image_gen_call_generating_event" +require_relative "openai/models/responses/response_image_gen_call_in_progress_event" +require_relative "openai/models/responses/response_image_gen_call_partial_image_event" require_relative "openai/models/responses/response_includable" require_relative "openai/models/responses/response_incomplete_event" require_relative "openai/models/responses/response_in_progress_event" @@ -387,6 +398,14 @@ require_relative "openai/models/responses/response_input_text" require_relative "openai/models/responses/response_item" require_relative "openai/models/responses/response_item_list" +require_relative "openai/models/responses/response_mcp_call_arguments_delta_event" +require_relative "openai/models/responses/response_mcp_call_arguments_done_event" +require_relative "openai/models/responses/response_mcp_call_completed_event" +require_relative "openai/models/responses/response_mcp_call_failed_event" +require_relative "openai/models/responses/response_mcp_call_in_progress_event" +require_relative "openai/models/responses/response_mcp_list_tools_completed_event" +require_relative "openai/models/responses/response_mcp_list_tools_failed_event" +require_relative "openai/models/responses/response_mcp_list_tools_in_progress_event" require_relative "openai/models/responses/response_output_audio" require_relative "openai/models/responses/response_output_item" require_relative "openai/models/responses/response_output_item_added_event" @@ -394,7 +413,13 @@ require_relative "openai/models/responses/response_output_message" require_relative "openai/models/responses/response_output_refusal" require_relative "openai/models/responses/response_output_text" +require_relative "openai/models/responses/response_output_text_annotation_added_event" +require_relative "openai/models/responses/response_queued_event" +require_relative "openai/models/responses/response_reasoning_delta_event" +require_relative 
"openai/models/responses/response_reasoning_done_event" require_relative "openai/models/responses/response_reasoning_item" +require_relative "openai/models/responses/response_reasoning_summary_delta_event" +require_relative "openai/models/responses/response_reasoning_summary_done_event" require_relative "openai/models/responses/response_reasoning_summary_part_added_event" require_relative "openai/models/responses/response_reasoning_summary_part_done_event" require_relative "openai/models/responses/response_reasoning_summary_text_delta_event" @@ -404,7 +429,6 @@ require_relative "openai/models/responses/response_retrieve_params" require_relative "openai/models/responses/response_status" require_relative "openai/models/responses/response_stream_event" -require_relative "openai/models/responses/response_text_annotation_delta_event" require_relative "openai/models/responses/response_text_config" require_relative "openai/models/responses/response_text_delta_event" require_relative "openai/models/responses/response_text_done_event" @@ -431,7 +455,6 @@ require_relative "openai/models/vector_store_create_params" require_relative "openai/models/vector_store_deleted" require_relative "openai/models/vector_store_delete_params" -require_relative "openai/models/vector_store_expiration_after" require_relative "openai/models/vector_store_list_params" require_relative "openai/models/vector_store_retrieve_params" require_relative "openai/models/vector_stores/file_batch_cancel_params" @@ -467,6 +490,9 @@ require_relative "openai/resources/chat/completions" require_relative "openai/resources/chat/completions/messages" require_relative "openai/resources/completions" +require_relative "openai/resources/containers" +require_relative "openai/resources/containers/files" +require_relative "openai/resources/containers/files/content" require_relative "openai/resources/embeddings" require_relative "openai/resources/evals" require_relative "openai/resources/evals/runs" diff --git 
a/lib/openai/client.rb b/lib/openai/client.rb index 4673f743..b583ead2 100644 --- a/lib/openai/client.rb +++ b/lib/openai/client.rb @@ -72,6 +72,9 @@ class Client < OpenAI::Internal::Transport::BaseClient # @return [OpenAI::Resources::Evals] attr_reader :evals + # @return [OpenAI::Resources::Containers] + attr_reader :containers + # @api private # # @return [Hash{String=>String}] @@ -147,6 +150,7 @@ def initialize( @uploads = OpenAI::Resources::Uploads.new(client: self) @responses = OpenAI::Resources::Responses.new(client: self) @evals = OpenAI::Resources::Evals.new(client: self) + @containers = OpenAI::Resources::Containers.new(client: self) end end end diff --git a/lib/openai/helpers/structured_output/union_of.rb b/lib/openai/helpers/structured_output/union_of.rb index 4d209daf..9fbdd5d2 100644 --- a/lib/openai/helpers/structured_output/union_of.rb +++ b/lib/openai/helpers/structured_output/union_of.rb @@ -26,7 +26,10 @@ def to_json_schema_inner(state:) mergeable_keys = {[:anyOf] => 0, [:type] => 0} schemas = variants.to_enum.with_index.map do new_state = {**state, path: [*path, "?.#{_2}"]} - OpenAI::Helpers::StructuredOutput::JsonSchemaConverter.to_json_schema_inner(_1, state: new_state) + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter.to_json_schema_inner( + _1, + state: new_state + ) end schemas.each do |schema| @@ -46,9 +49,13 @@ def initialize(*variants) case variants in [Symbol => d, Hash => vs] discriminator(d) - vs.each { variant(_1, _2) } + vs.each do |k, v| + v.is_a?(Proc) ? variant(k, v) : variant(k, -> { v }) + end else - variants.each { variant(_1) } + variants.each do |v| + v.is_a?(Proc) ? 
variant(v) : variant(-> { v }) + end end end end diff --git a/lib/openai/internal.rb b/lib/openai/internal.rb index 01f2c4b7..0fc8d8ad 100644 --- a/lib/openai/internal.rb +++ b/lib/openai/internal.rb @@ -13,5 +13,8 @@ module Internal define_sorbet_constant!(:AnyHash) do T.type_alias { T::Hash[Symbol, T.anything] } end + define_sorbet_constant!(:FileInput) do + T.type_alias { T.any(Pathname, StringIO, IO, String, OpenAI::FilePart) } + end end end diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index 3c043142..80606cca 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -12,6 +12,7 @@ module Type # Array of items of a given type. class ArrayOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport private_class_method :new @@ -110,6 +111,13 @@ def dump(value, state:) end end + # @api private + # + # @return [Object] + def to_sorbet_type + T::Array[OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(item_type)] + end + # @api private # # @return [generic] diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 7fd973f0..2ed6657f 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -304,6 +304,13 @@ def dump(value, state:) acc end + + # @api private + # + # @return [Object] + def to_sorbet_type + self + end end class << self diff --git a/lib/openai/internal/type/boolean.rb b/lib/openai/internal/type/boolean.rb index 3e852539..23c4d1f9 100644 --- a/lib/openai/internal/type/boolean.rb +++ b/lib/openai/internal/type/boolean.rb @@ -10,6 +10,7 @@ module Type # Ruby has no Boolean class; this is something for models to refer to. 
class Boolean extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport private_class_method :new @@ -56,6 +57,13 @@ def coerce(value, state:) # @option state [Boolean] :can_retry # # @return [Boolean, Object] + + # @api private + # + # @return [Object] + def to_sorbet_type + T::Boolean + end end end end diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index c28bf11f..9dd70f63 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -112,6 +112,18 @@ def coerce(value, state:) # # @return [Symbol, Object] + # @api private + # + # @return [Object] + def to_sorbet_type + case values + in [] + T.noreturn + in [value, *_] + T.all(OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(value), self) + end + end + # @api private # # @param depth [Integer] diff --git a/lib/openai/internal/type/file_input.rb b/lib/openai/internal/type/file_input.rb index 3ed13ec6..e1e948f3 100644 --- a/lib/openai/internal/type/file_input.rb +++ b/lib/openai/internal/type/file_input.rb @@ -89,6 +89,13 @@ def dump(value, state:) value end + + # @api private + # + # @return [Object] + def to_sorbet_type + T.any(Pathname, StringIO, IO, String, OpenAI::FilePart) + end end end end diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 6e60bc15..9dcf259b 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -12,6 +12,7 @@ module Type # Hash of items of a given type. 
class HashOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport private_class_method :new @@ -130,6 +131,13 @@ def dump(value, state:) end end + # @api private + # + # @return [Object] + def to_sorbet_type + T::Hash[OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(item_type)] + end + # @api private # # @return [generic] diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 47a040ff..3eed40a8 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -216,6 +216,18 @@ def dump(value, state:) super end + # @api private + # + # @return [Object] + def to_sorbet_type + case (v = variants) + in [] + T.noreturn + else + T.any(*v.map { OpenAI::Internal::Util::SorbetRuntimeSupport.to_sorbet_type(_1) }) + end + end + # rubocop:enable Style/CaseEquality # rubocop:enable Style/HashEachMethods diff --git a/lib/openai/internal/type/unknown.rb b/lib/openai/internal/type/unknown.rb index a629570f..bc8b7a95 100644 --- a/lib/openai/internal/type/unknown.rb +++ b/lib/openai/internal/type/unknown.rb @@ -10,6 +10,7 @@ module Type # When we don't know what to expect for the value. 
class Unknown extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport # rubocop:disable Lint/UnusedMethodArgument @@ -58,6 +59,13 @@ def coerce(value, state:) # @option state [Boolean] :can_retry # # @return [Object] + + # @api private + # + # @return [Object] + def to_sorbet_type + T.anything + end end # rubocop:enable Lint/UnusedMethodArgument diff --git a/lib/openai/internal/util.rb b/lib/openai/internal/util.rb index b33fa88f..c84c9e78 100644 --- a/lib/openai/internal/util.rb +++ b/lib/openai/internal/util.rb @@ -9,6 +9,23 @@ module Util # @return [Float] def self.monotonic_secs = Process.clock_gettime(Process::CLOCK_MONOTONIC) + # @api private + # + # @param ns [Module, Class] + # + # @return [Enumerable] + def self.walk_namespaces(ns) + ns.constants(false).lazy.flat_map do + case (c = ns.const_get(_1, false)) + in Module | Class + walk_namespaces(c) + else + [] + end + end + .chain([ns]) + end + class << self # @api private # @@ -826,11 +843,39 @@ def const_missing(name) sorbet_runtime_constants.fetch(name).call end + # @api private + # + # @param name [Symbol] + # + # @return [Boolean] + def sorbet_constant_defined?(name) = sorbet_runtime_constants.key?(name) + # @api private # # @param name [Symbol] # @param blk [Proc] def define_sorbet_constant!(name, &blk) = sorbet_runtime_constants.store(name, blk) + + # @api private + # + # @return [Object] + def to_sorbet_type = raise NotImplementedError + + class << self + # @api private + # + # @param type [OpenAI::Internal::Util::SorbetRuntimeSupport, Object] + # + # @return [Object] + def to_sorbet_type(type) + case type + in OpenAI::Internal::Util::SorbetRuntimeSupport + type.to_sorbet_type + else + type + end + end + end end extend OpenAI::Internal::Util::SorbetRuntimeSupport diff --git a/lib/openai/models.rb b/lib/openai/models.rb index c993468a..a0f5d753 100644 --- a/lib/openai/models.rb +++ b/lib/openai/models.rb @@ -5,29 +5,40 @@ module OpenAI 
cls.define_sorbet_constant!(:OrHash) { T.type_alias { T.any(cls, OpenAI::Internal::AnyHash) } } end - [ - *OpenAI::Internal::Type::Enum.included_modules, - *OpenAI::Internal::Type::Union.included_modules - ].each do |cls| - cls.constants.each do |name| - case cls.const_get(name) - in true | false - cls.define_sorbet_constant!(:TaggedBoolean) { T.type_alias { T.all(T::Boolean, cls) } } - cls.define_sorbet_constant!(:OrBoolean) { T.type_alias { T::Boolean } } - in Integer - cls.define_sorbet_constant!(:TaggedInteger) { T.type_alias { T.all(Integer, cls) } } - cls.define_sorbet_constant!(:OrInteger) { T.type_alias { Integer } } - in Float - cls.define_sorbet_constant!(:TaggedFloat) { T.type_alias { T.all(Float, cls) } } - cls.define_sorbet_constant!(:OrFloat) { T.type_alias { Float } } - in Symbol - cls.define_sorbet_constant!(:TaggedSymbol) { T.type_alias { T.all(Symbol, cls) } } - cls.define_sorbet_constant!(:OrSymbol) { T.type_alias { T.any(Symbol, String) } } - else + OpenAI::Internal::Util.walk_namespaces(OpenAI::Models).each do |mod| + case mod + in OpenAI::Internal::Type::Enum | OpenAI::Internal::Type::Union + mod.constants.each do |name| + case mod.const_get(name) + in true | false + mod.define_sorbet_constant!(:TaggedBoolean) { T.type_alias { T.all(T::Boolean, mod) } } + mod.define_sorbet_constant!(:OrBoolean) { T.type_alias { T::Boolean } } + in Integer + mod.define_sorbet_constant!(:TaggedInteger) { T.type_alias { T.all(Integer, mod) } } + mod.define_sorbet_constant!(:OrInteger) { T.type_alias { Integer } } + in Float + mod.define_sorbet_constant!(:TaggedFloat) { T.type_alias { T.all(Float, mod) } } + mod.define_sorbet_constant!(:OrFloat) { T.type_alias { Float } } + in Symbol + mod.define_sorbet_constant!(:TaggedSymbol) { T.type_alias { T.all(Symbol, mod) } } + mod.define_sorbet_constant!(:OrSymbol) { T.type_alias { T.any(Symbol, String) } } + else + end end + else end end + OpenAI::Internal::Util.walk_namespaces(OpenAI::Models) + .lazy + 
.grep(OpenAI::Internal::Type::Union) + .each do |mod| + const = :Variants + next if mod.sorbet_constant_defined?(const) + + mod.define_sorbet_constant!(const) { T.type_alias { mod.to_sorbet_type } } + end + AllModels = OpenAI::Models::AllModels Audio = OpenAI::Models::Audio @@ -70,6 +81,16 @@ module OpenAI CompoundFilter = OpenAI::Models::CompoundFilter + ContainerCreateParams = OpenAI::Models::ContainerCreateParams + + ContainerDeleteParams = OpenAI::Models::ContainerDeleteParams + + ContainerListParams = OpenAI::Models::ContainerListParams + + ContainerRetrieveParams = OpenAI::Models::ContainerRetrieveParams + + Containers = OpenAI::Models::Containers + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse Embedding = OpenAI::Models::Embedding @@ -86,12 +107,8 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams - EvalItem = OpenAI::Models::EvalItem - EvalListParams = OpenAI::Models::EvalListParams - EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig - EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams Evals = OpenAI::Models::Evals @@ -208,8 +225,6 @@ module OpenAI VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams - VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter - VectorStoreListParams = OpenAI::Models::VectorStoreListParams VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index a337cfe0..06729722 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -24,13 +24,7 @@ module ResponsesOnlyModel end # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::AllModels::ResponsesOnlyModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(String, OpenAI::ChatModel::TaggedSymbol, OpenAI::AllModels::ResponsesOnlyModel::TaggedSymbol) - end - end + # @return [Array(String, Symbol, 
OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 6a4e469f..28cb4113 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -18,7 +18,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. # - # @return [String, Symbol, OpenAI::Audio::SpeechModel] + # @return [String, Symbol, OpenAI::Models::Audio::SpeechModel] required :model, union: -> { OpenAI::Audio::SpeechCreateParams::Model } # @!attribute voice @@ -27,7 +27,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # `verse`. Previews of the voices are available in the # [Text to speech guide](https://platform.openai.com/docs/guides/text-to-speech#voice-options). # - # @return [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] + # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] required :voice, union: -> { OpenAI::Audio::SpeechCreateParams::Voice } # @!attribute instructions @@ -41,7 +41,7 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. # - # @return [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Audio::SpeechCreateParams::ResponseFormat } # @!attribute speed @@ -57,13 +57,13 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # # @param input [String] The text to generate audio for. The maximum length is 4096 characters. 
# - # @param model [String, Symbol, OpenAI::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): # - # @param voice [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not # - # @param response_format [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav + # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav # # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is # @@ -80,11 +80,7 @@ module Model variant enum: -> { OpenAI::Audio::SpeechModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::Audio::SpeechModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::Audio::SpeechModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] end # The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`, @@ -96,27 +92,27 @@ module Voice variant String - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ALLOY } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ALLOY } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ASH } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ASH } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::BALLAD } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::BALLAD } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::CORAL } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::CORAL } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ECHO } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ECHO } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::FABLE } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::FABLE } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::ONYX } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::ONYX } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::NOVA } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::NOVA } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::SAGE } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SAGE } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::SHIMMER } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::SHIMMER } - variant const: -> { OpenAI::Audio::SpeechCreateParams::Voice::VERSE } + variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE } # @!method self.variants # @return [Array(String, Symbol)] diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index cd9c0b2d..82dc9e67 100644 --- a/lib/openai/models/audio/transcription.rb +++ 
b/lib/openai/models/audio/transcription.rb @@ -15,19 +15,19 @@ class Transcription < OpenAI::Internal::Type::BaseModel # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::Transcription::Logprob] } # @!method initialize(text:, logprobs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::Transcription} for more details. + # {OpenAI::Models::Audio::Transcription} for more details. # # Represents a transcription response returned by model, based on the provided # input. # # @param text [String] The transcribed text. # - # @param logprobs [Array] The log probabilities of the tokens in the transcription. Only returned with the + # @param logprobs [Array] The log probabilities of the tokens in the transcription. Only returned with the class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute token diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 0c99423b..2d51435c 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -14,7 +14,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :file, OpenAI::Internal::Type::FileInput # @!attribute model @@ -22,7 +22,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source # Whisper V2 model). 
# - # @return [String, Symbol, OpenAI::AudioModel] + # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Audio::TranscriptionCreateParams::Model } # @!attribute chunking_strategy @@ -31,7 +31,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # boundaries. `server_vad` object can be provided to tweak VAD detection # parameters manually. If unset, the audio is transcribed as a single block. # - # @return [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] + # @return [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] optional :chunking_strategy, union: -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy }, nil?: true @@ -43,7 +43,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # response_format set to `json` and only with the models `gpt-4o-transcribe` and # `gpt-4o-mini-transcribe`. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionInclude] } # @!attribute language @@ -68,7 +68,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. # - # @return [Symbol, OpenAI::AudioResponseFormat, nil] + # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] optional :response_format, enum: -> { OpenAI::AudioResponseFormat } # @!attribute temperature @@ -88,7 +88,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # is no additional latency for segment timestamps, but generating word timestamps # incurs additional latency. 
# - # @return [Array, nil] + # @return [Array, nil] optional :timestamp_granularities, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Audio::TranscriptionCreateParams::TimestampGranularity] @@ -98,23 +98,23 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranscriptionCreateParams} for more details. # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # - # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # @param chunking_strategy [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs # - # @param include [Array] Additional information to include in the transcription response. + # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. 
Supplying the input language in [ISO-639-1](htt # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # - # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -130,11 +130,7 @@ module Model variant enum: -> { OpenAI::AudioModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::AudioModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end # Controls how the audio is cut into chunks. When set to `"auto"`, the server @@ -153,7 +149,7 @@ class VadConfig < OpenAI::Internal::Type::BaseModel # @!attribute type # Must be set to `server_vad` to enable manual chunking using server side VAD. 
# - # @return [Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] + # @return [Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] required :type, enum: -> { OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type @@ -183,10 +179,10 @@ class VadConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(type:, prefix_padding_ms: nil, silence_duration_ms: nil, threshold: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig} for more - # details. + # {OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig} + # for more details. # - # @param type [Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] Must be set to `server_vad` to enable manual chunking using server side VAD. + # @param type [Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::Type] Must be set to `server_vad` to enable manual chunking using server side VAD. # # @param prefix_padding_ms [Integer] Amount of audio to include before the VAD detected speech (in # @@ -196,7 +192,7 @@ class VadConfig < OpenAI::Internal::Type::BaseModel # Must be set to `server_vad` to enable manual chunking using server side VAD. 
# - # @see OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig#type + # @see OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig#type module Type extend OpenAI::Internal::Type::Enum @@ -208,13 +204,7 @@ module Type end # @!method self.variants - # @return [Array(Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(Symbol, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig) - end - end + # @return [Array(Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig)] end module TimestampGranularity diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 8f0ea45a..0bbe16b7 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -19,11 +19,7 @@ module TranscriptionCreateResponse variant -> { OpenAI::Audio::TranscriptionVerbose } # @!method self.variants - # @return [Array(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose) } - end + # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] end end end diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb index 8b7b4416..3ca8d867 100644 --- a/lib/openai/models/audio/transcription_segment.rb +++ b/lib/openai/models/audio/transcription_segment.rb @@ -69,7 +69,7 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, avg_logprob:, compression_ratio:, end_:, no_speech_prob:, seek:, start:, temperature:, text:, tokens:) # Some parameter documentations has been truncated, see - # 
{OpenAI::Audio::TranscriptionSegment} for more details. + # {OpenAI::Models::Audio::TranscriptionSegment} for more details. # # @param id [Integer] Unique identifier of the segment. # diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index 93079e55..2112080e 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -19,13 +19,7 @@ module TranscriptionStreamEvent variant :"transcript.text.done", -> { OpenAI::Audio::TranscriptionTextDoneEvent } # @!method self.variants - # @return [Array(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Audio::TranscriptionTextDeltaEvent, OpenAI::Audio::TranscriptionTextDoneEvent) - end - end + # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] end end end diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 58db67b8..0541f312 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -21,13 +21,13 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] } # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDeltaEvent} for more details. + # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent} for more details. 
# # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you @@ -36,7 +36,7 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param delta [String] The text delta that was additionally transcribed. # - # @param logprobs [Array] The log probabilities of the delta. Only included if you [create a transcription + # @param logprobs [Array] The log probabilities of the delta. Only included if you [create a transcription # # @param type [Symbol, :"transcript.text.delta"] The type of the event. Always `transcript.text.delta`. @@ -61,7 +61,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token: nil, bytes: nil, logprob: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob} for more details. + # {OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob} for more details. # # @param token [String] The token that was used to generate the log probability. # diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 2b2eb5b2..2651d973 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -22,13 +22,13 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. # - # @return [Array, nil] + # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] } # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDoneEvent} for more details. 
+ # {OpenAI::Models::Audio::TranscriptionTextDoneEvent} for more details. # # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you @@ -37,7 +37,7 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param text [String] The text that was transcribed. # - # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ + # @param logprobs [Array] The log probabilities of the individual tokens in the transcription. Only includ # # @param type [Symbol, :"transcript.text.done"] The type of the event. Always `transcript.text.done`. @@ -62,7 +62,7 @@ class Logprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token: nil, bytes: nil, logprob: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Audio::TranscriptionTextDoneEvent::Logprob} for more details. + # {OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob} for more details. # # @param token [String] The token that was used to generate the log probability. # diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index 00cf9ea0..eaa0ebf3 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -25,13 +25,13 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @!attribute segments # Segments of the transcribed text and their corresponding details. # - # @return [Array, nil] + # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionSegment] } # @!attribute words # Extracted words and their corresponding timestamps. 
# - # @return [Array, nil] + # @return [Array, nil] optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionWord] } # @!method initialize(duration:, language:, text:, segments: nil, words: nil) @@ -44,9 +44,9 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # # @param text [String] The transcribed text. # - # @param segments [Array] Segments of the transcribed text and their corresponding details. + # @param segments [Array] Segments of the transcribed text and their corresponding details. # - # @param words [Array] Extracted words and their corresponding timestamps. + # @param words [Array] Extracted words and their corresponding timestamps. end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index a3594a7c..35e3dd1b 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -12,14 +12,14 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :file, OpenAI::Internal::Type::FileInput # @!attribute model # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. # - # @return [String, Symbol, OpenAI::AudioModel] + # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Audio::TranslationCreateParams::Model } # @!attribute prompt @@ -35,7 +35,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. 
# - # @return [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Audio::TranslationCreateParams::ResponseFormat } # @!attribute temperature @@ -52,13 +52,13 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Audio::TranslationCreateParams} for more details. # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. 
Higher values like 0.8 will make the # @@ -75,11 +75,7 @@ module Model variant enum: -> { OpenAI::AudioModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::AudioModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::AudioModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end # The format of the output, in one of these options: `json`, `text`, `srt`, diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index c0fcb1fc..7e056468 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -12,11 +12,7 @@ module TranslationCreateResponse variant -> { OpenAI::Audio::TranslationVerbose } # @!method self.variants - # @return [Array(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose) } - end + # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] end end end diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index a237803c..1bb16b1e 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -25,7 +25,7 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # @!attribute segments # Segments of the translated text and their corresponding details. # - # @return [Array, nil] + # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Audio::TranscriptionSegment] } # @!method initialize(duration:, language:, text:, segments: nil) @@ -35,7 +35,7 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # # @param text [String] The translated text. 
# - # @param segments [Array] Segments of the translated text and their corresponding details. + # @param segments [Array] Segments of the translated text and their corresponding details. end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 84f42355..b8dffe10 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -42,7 +42,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute status # The current status of the batch. # - # @return [Symbol, OpenAI::Batch::Status] + # @return [Symbol, OpenAI::Models::Batch::Status] required :status, enum: -> { OpenAI::Batch::Status } # @!attribute cancelled_at @@ -71,7 +71,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute errors # - # @return [OpenAI::Batch::Errors, nil] + # @return [OpenAI::Models::Batch::Errors, nil] optional :errors, -> { OpenAI::Batch::Errors } # @!attribute expired_at @@ -124,12 +124,12 @@ class Batch < OpenAI::Internal::Type::BaseModel # @!attribute request_counts # The request counts for different statuses within the batch. # - # @return [OpenAI::BatchRequestCounts, nil] + # @return [OpenAI::Models::BatchRequestCounts, nil] optional :request_counts, -> { OpenAI::BatchRequestCounts } # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, output_file_id: nil, request_counts: nil, object: :batch) - # Some parameter documentations has been truncated, see {OpenAI::Batch} for more - # details. + # Some parameter documentations has been truncated, see {OpenAI::Models::Batch} + # for more details. # # @param id [String] # @@ -141,7 +141,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param input_file_id [String] The ID of the input file for the batch. 
# - # @param status [Symbol, OpenAI::Batch::Status] The current status of the batch. + # @param status [Symbol, OpenAI::Models::Batch::Status] The current status of the batch. # # @param cancelled_at [Integer] The Unix timestamp (in seconds) for when the batch was cancelled. # @@ -151,7 +151,7 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param error_file_id [String] The ID of the file containing the outputs of requests with errors. # - # @param errors [OpenAI::Batch::Errors] + # @param errors [OpenAI::Models::Batch::Errors] # # @param expired_at [Integer] The Unix timestamp (in seconds) for when the batch expired. # @@ -167,13 +167,13 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @param output_file_id [String] The ID of the file containing the outputs of successfully executed requests. # - # @param request_counts [OpenAI::BatchRequestCounts] The request counts for different statuses within the batch. + # @param request_counts [OpenAI::Models::BatchRequestCounts] The request counts for different statuses within the batch. # # @param object [Symbol, :batch] The object type, which is always `batch`. # The current status of the batch. # - # @see OpenAI::Batch#status + # @see OpenAI::Models::Batch#status module Status extend OpenAI::Internal::Type::Enum @@ -190,11 +190,11 @@ module Status # @return [Array] end - # @see OpenAI::Batch#errors + # @see OpenAI::Models::Batch#errors class Errors < OpenAI::Internal::Type::BaseModel # @!attribute data # - # @return [Array, nil] + # @return [Array, nil] optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::BatchError] } # @!attribute object @@ -204,7 +204,7 @@ class Errors < OpenAI::Internal::Type::BaseModel optional :object, String # @!method initialize(data: nil, object: nil) - # @param data [Array] + # @param data [Array] # # @param object [String] The object type, which is always `list`. 
end diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index ce21fc86..9b39fcd2 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -11,7 +11,7 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # The time frame within which the batch should be processed. Currently only `24h` # is supported. # - # @return [Symbol, OpenAI::BatchCreateParams::CompletionWindow] + # @return [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] required :completion_window, enum: -> { OpenAI::BatchCreateParams::CompletionWindow } # @!attribute endpoint @@ -20,7 +20,7 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # are supported. Note that `/v1/embeddings` batches are also restricted to a # maximum of 50,000 embedding inputs across all requests in the batch. # - # @return [Symbol, OpenAI::BatchCreateParams::Endpoint] + # @return [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] required :endpoint, enum: -> { OpenAI::BatchCreateParams::Endpoint } # @!attribute input_file_id @@ -52,9 +52,9 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::BatchCreateParams} for more details. # - # @param completion_window [Symbol, OpenAI::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` # - # @param endpoint [Symbol, OpenAI::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. 
Currently `/v1/responses` # # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. # diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index d6aa3ad5..7bbd1868 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -68,7 +68,7 @@ class Assistant < OpenAI::Internal::Type::BaseModel # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute response_format @@ -93,7 +93,7 @@ class Assistant < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -110,7 +110,7 @@ class Assistant < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
# - # @return [OpenAI::Beta::Assistant::ToolResources, nil] + # @return [OpenAI::Models::Beta::Assistant::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::Assistant::ToolResources }, nil?: true # @!attribute top_p @@ -124,8 +124,8 @@ class Assistant < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!method initialize(id:, created_at:, description:, instructions:, metadata:, model:, name:, tools:, response_format: nil, temperature: nil, tool_resources: nil, top_p: nil, object: :assistant) - # Some parameter documentations has been truncated, see {OpenAI::Beta::Assistant} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Assistant} for more details. # # Represents an `assistant` that can call the model and use tools. # @@ -143,28 +143,28 @@ class Assistant < OpenAI::Internal::Type::BaseModel # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::Assistant::ToolResources, nil] A set of resources that are used by the assistant's tools. 
The resources are spe + # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # # @param object [Symbol, :assistant] The object type, which is always `assistant`. - # @see OpenAI::Beta::Assistant#tool_resources + # @see OpenAI::Models::Beta::Assistant#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::Assistant::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::Assistant::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::Assistant::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -173,10 +173,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
# - # @param code_interpreter [OpenAI::Beta::Assistant::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::Assistant::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] - # @see OpenAI::Beta::Assistant::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -188,12 +188,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Assistant::ToolResources::CodeInterpreter} for more details. + # {OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter} for more + # details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::Assistant::ToolResources#file_search + # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The ID of the @@ -206,7 +207,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Assistant::ToolResources::FileSearch} for more details. + # {OpenAI::Models::Beta::Assistant::ToolResources::FileSearch} for more details. 
# # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index c3a32b57..84e6d083 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -15,7 +15,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::ChatModel] + # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Beta::AssistantCreateParams::Model } # @!attribute description @@ -56,7 +56,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -81,7 +81,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -98,7 +98,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
# - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::AssistantCreateParams::ToolResources }, nil?: true # @!attribute tools @@ -106,7 +106,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute top_p @@ -123,7 +123,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::AssistantCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. # @@ -133,15 +133,15 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. 
Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # @@ -161,17 +161,13 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter @@ -179,7 +175,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute file_search # - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] + # @return 
[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -188,10 +184,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @param code_interpreter [OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] - # @see OpenAI::Beta::AssistantCreateParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -203,13 +199,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter} + # for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::AssistantCreateParams::ToolResources#file_search + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -226,7 +222,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this assistant. 
There can be a maximum of 1 # vector store attached to the assistant. # - # @return [Array, nil] + # @return [Array, nil] optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] @@ -234,19 +230,19 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch} for + # more details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ # - # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, union: -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy @@ -273,10 +269,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore} + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore} # for more details. # - # @param chunking_strategy [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad # @@ -285,7 +281,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @see OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -319,7 +315,7 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, -> { OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -332,11 +328,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. - # @see OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. 
@@ -355,7 +351,7 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. @@ -365,16 +361,7 @@ class Static < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - end - end + # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index 3c936a2a..752af1a6 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -37,7 +37,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# - # @return [Symbol, OpenAI::Beta::AssistantListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::AssistantListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) @@ -50,7 +50,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index 4e58c10e..7541c8b2 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -43,18 +43,7 @@ module AssistantResponseFormatOption variant -> { OpenAI::ResponseFormatJSONSchema } # @!method self.variants - # @return [Array(Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) - end - end + # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 6f78210f..0245a53a 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb 
+++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -123,7 +123,7 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] required :data, -> { OpenAI::Beta::Thread } # @!attribute event @@ -139,13 +139,13 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, enabled: nil, event: :"thread.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated} for more details. # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap + # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap # # @param enabled [Boolean] Whether to enable input audio transcription. # @@ -157,7 +157,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -167,12 +167,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated} for more details. # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. 
# - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.created"] end @@ -182,7 +182,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -192,12 +192,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.queued") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.queued"] end @@ -207,7 +207,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -217,12 +217,13 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.in_progress"] end @@ -232,7 +233,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -242,12 +243,13 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.requires_action") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. 
# - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.requires_action"] end @@ -257,7 +259,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -267,12 +269,13 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.completed"] end @@ -282,7 +285,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -292,12 +295,13 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.incomplete"] end @@ -307,7 +311,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -317,12 +321,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. 
# - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.failed"] end @@ -332,7 +336,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -342,12 +346,13 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelling") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelling"] end @@ -357,7 +362,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -367,12 +372,13 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelled"] end @@ -382,7 +388,7 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -392,12 +398,12 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. 
# - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.expired"] end @@ -406,7 +412,7 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -416,13 +422,14 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.created"] end @@ -431,7 +438,7 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -441,13 +448,14 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -457,7 +465,7 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] required :data, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaEvent } # @!attribute event @@ -467,13 +475,14 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta} for more + # details. # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. 
any changed fields on a run step during streami + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami # # @param event [Symbol, :"thread.run.step.delta"] end @@ -482,7 +491,7 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -492,13 +501,14 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.completed"] end @@ -507,7 +517,7 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -517,13 +527,14 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed} for more details. 
+ # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.failed"] end @@ -532,7 +543,7 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -542,13 +553,14 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -557,7 +569,7 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -567,13 +579,14 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.expired"] end @@ -583,7 +596,7 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -593,13 +606,14 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. 
# - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.created"] end @@ -609,7 +623,7 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -619,13 +633,14 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. # - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.in_progress"] end @@ -635,7 +650,7 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during # streaming. 
# - # @return [OpenAI::Beta::Threads::MessageDeltaEvent] + # @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent] required :data, -> { OpenAI::Beta::Threads::MessageDeltaEvent } # @!attribute event @@ -645,13 +660,14 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta} for more + # details. # # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. # - # @param data [OpenAI::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming # # @param event [Symbol, :"thread.message.delta"] end @@ -661,7 +677,7 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -671,13 +687,14 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. 
# - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.completed"] end @@ -687,7 +704,7 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -697,13 +714,14 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete} for more details. + # {OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. # - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.incomplete"] end @@ -711,7 +729,7 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel class ErrorEvent < OpenAI::Internal::Type::BaseModel # @!attribute data # - # @return [OpenAI::ErrorObject] + # @return [OpenAI::Models::ErrorObject] required :data, -> { OpenAI::ErrorObject } # @!attribute event @@ -724,43 +742,12 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. 
# This can happen due to an internal server error or a timeout. # - # @param data [OpenAI::ErrorObject] + # @param data [OpenAI::Models::ErrorObject] # @param event [Symbol, :error] end # @!method self.variants - # @return [Array(OpenAI::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Beta::AssistantStreamEvent::ErrorEvent)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - 
OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) - end - end + # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index c6f7c311..495ff39a 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -15,13 +15,7 @@ module AssistantTool variant :function, -> { OpenAI::Beta::FunctionTool } # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::FileSearchTool, OpenAI::Beta::FunctionTool) - end - end + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 7f43fa4a..a4fc7a03 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -7,25 +7,25 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the tool. 
If type is `function`, the function name must be set # - # @return [Symbol, OpenAI::Beta::AssistantToolChoice::Type] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] required :type, enum: -> { OpenAI::Beta::AssistantToolChoice::Type } # @!attribute function # - # @return [OpenAI::Beta::AssistantToolChoiceFunction, nil] + # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction, nil] optional :function, -> { OpenAI::Beta::AssistantToolChoiceFunction } # @!method initialize(type:, function: nil) # Specifies a tool the model should use. Use to force the model to call a specific # tool. # - # @param type [Symbol, OpenAI::Beta::AssistantToolChoice::Type] The type of the tool. If type is `function`, the function name must be set + # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] The type of the tool. If type is `function`, the function name must be set # - # @param function [OpenAI::Beta::AssistantToolChoiceFunction] + # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] # The type of the tool. 
If type is `function`, the function name must be set # - # @see OpenAI::Beta::AssistantToolChoice#type + # @see OpenAI::Models::Beta::AssistantToolChoice#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 0561c756..8bfdb818 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -35,16 +35,7 @@ module Auto end # @!method self.variants - # @return [Array(Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Beta::AssistantToolChoice - ) - end - end + # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 40734bc9..6f8f9b27 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -39,7 +39,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model, nil] + # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model, nil] optional :model, union: -> { OpenAI::Beta::AssistantUpdateParams::Model } # @!attribute name @@ -56,7 +56,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. 
# - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -81,7 +81,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -98,7 +98,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources }, nil?: true # @!attribute tools @@ -106,7 +106,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute top_p @@ -129,19 +129,19 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model] ID of the model to use. 
You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. 
There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # @@ -157,77 +157,77 @@ module Model variant String - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_2025_04_14 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_2025_04_14 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI_2025_04_14 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_MINI_2025_04_14 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO_2025_04_14 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1_NANO_2025_04_14 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O3_MINI } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O3_MINI_2025_01_31 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O1 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::O1_2024_12_17 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::O1_2024_12_17 } - variant const: -> { 
OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_11_20 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_08_06 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_2024_05_13 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_MINI } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4O_MINI_2024_07_18 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_5_PREVIEW_2025_02_27 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_2024_04_09 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0125_PREVIEW } - variant const: -> { 
OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_TURBO_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_1106_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_VISION_PREVIEW } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0314 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0314 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_0613 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_0613 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0314 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_4_32K_0613 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 } + variant const: -> { 
OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0613 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_1106 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_0125 } - variant const: -> { OpenAI::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } + variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } # @!method self.variants # @return [Array(String, Symbol)] @@ -281,7 +281,7 @@ module Model class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter @@ -289,7 +289,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute file_search # - # @return [OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -298,10 +298,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
# - # @param code_interpreter [OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] - # @see OpenAI::Beta::AssistantUpdateParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # Overrides the list of @@ -314,13 +314,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter} + # for more details. # # @param file_ids [Array] Overrides the list of [file](https://platform.openai.com/docs/api-reference/file end - # @see OpenAI::Beta::AssistantUpdateParams::ToolResources#file_search + # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # Overrides the @@ -333,8 +333,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch} for + # more details. 
# # @param vector_store_ids [Array] Overrides the [vector store](https://platform.openai.com/docs/api-reference/vect end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index c521e6a6..e12b3e5a 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -13,15 +13,15 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute file_search # Overrides for the file search tool. # - # @return [OpenAI::Beta::FileSearchTool::FileSearch, nil] + # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::FileSearchTool::FileSearch } # @!method initialize(file_search: nil, type: :file_search) - # @param file_search [OpenAI::Beta::FileSearchTool::FileSearch] Overrides for the file search tool. + # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] Overrides for the file search tool. # # @param type [Symbol, :file_search] The type of tool being defined: `file_search` - # @see OpenAI::Beta::FileSearchTool#file_search + # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute max_num_results # The maximum number of results the file search tool should output. The default is @@ -44,20 +44,20 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions, nil] + # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions } # @!method initialize(max_num_results: nil, ranking_options: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::FileSearchTool::FileSearch} for more details. 
+ # {OpenAI::Models::Beta::FileSearchTool::FileSearch} for more details. # # Overrides for the file search tool. # # @param max_num_results [Integer] The maximum number of results the file search tool should output. The default is # - # @param ranking_options [OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool + # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] The ranking options for the file search. If not specified, the file search tool - # @see OpenAI::Beta::FileSearchTool::FileSearch#ranking_options + # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute score_threshold # The score threshold for the file search. All values must be a floating point @@ -70,12 +70,13 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @return [Symbol, OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil] + # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker } # @!method initialize(score_threshold:, ranker: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions} for more details. + # {OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions} for more + # details. # # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. @@ -86,12 +87,12 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @param score_threshold [Float] The score threshold for the file search. 
All values must be a floating point num # - # @param ranker [Symbol, OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank + # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @see OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions#ranker + # @see OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/function_tool.rb b/lib/openai/models/beta/function_tool.rb index 512eb078..361c2c44 100644 --- a/lib/openai/models/beta/function_tool.rb +++ b/lib/openai/models/beta/function_tool.rb @@ -6,7 +6,7 @@ module Beta class FunctionTool < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::FunctionDefinition] + # @return [OpenAI::Models::FunctionDefinition] required :function, -> { OpenAI::FunctionDefinition } # @!attribute type @@ -16,7 +16,7 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel required :type, const: :function # @!method initialize(function:, type: :function) - # @param function [OpenAI::FunctionDefinition] + # @param function [OpenAI::Models::FunctionDefinition] # # @param type [Symbol, :function] The type of tool being defined: `function` end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 564f9417..9f75576e 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -34,7 +34,7 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -44,13 +44,14 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated} for more details. + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # created. # - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.created"] end @@ -60,7 +61,7 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -70,13 +71,14 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress} for more details. + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) moves # to an `in_progress` state. 
# - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.in_progress"] end @@ -86,7 +88,7 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # Represents a message delta i.e. any changed fields on a message during # streaming. # - # @return [OpenAI::Beta::Threads::MessageDeltaEvent] + # @return [OpenAI::Models::Beta::Threads::MessageDeltaEvent] required :data, -> { OpenAI::Beta::Threads::MessageDeltaEvent } # @!attribute event @@ -96,13 +98,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta} for more details. + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta} for more details. # # Occurs when parts of a # [Message](https://platform.openai.com/docs/api-reference/messages/object) are # being streamed. # - # @param data [OpenAI::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] Represents a message delta i.e. any changed fields on a message during streaming # # @param event [Symbol, :"thread.message.delta"] end @@ -112,7 +114,7 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -122,13 +124,14 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted} for more details. + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) is # completed. # - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.completed"] end @@ -138,7 +141,7 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] required :data, -> { OpenAI::Beta::Threads::Message } # @!attribute event @@ -148,31 +151,20 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.message.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete} for more details. + # {OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete} for more + # details. # # Occurs when a # [message](https://platform.openai.com/docs/api-reference/messages/object) ends # before it is completed. 
# - # @param data [OpenAI::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe + # @param data [OpenAI::Models::Beta::Threads::Message] Represents a message within a [thread](https://platform.openai.com/docs/api-refe # # @param event [Symbol, :"thread.message.incomplete"] end # @!method self.variants - # @return [Array(OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::MessageStreamEvent::ThreadMessageCreated, - OpenAI::Beta::MessageStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::MessageStreamEvent::ThreadMessageDelta, - OpenAI::Beta::MessageStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::MessageStreamEvent::ThreadMessageIncomplete - ) - end - end + # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 165aeefc..9dbbbb9c 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -39,7 +39,7 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -49,13 +49,14 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is created. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.created"] end @@ -64,7 +65,7 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -74,13 +75,14 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # moves to an `in_progress` state. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. 
+ # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.in_progress"] end @@ -90,7 +92,7 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # Represents a run step delta i.e. any changed fields on a run step during # streaming. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] required :data, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaEvent } # @!attribute event @@ -100,13 +102,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta} for more details. # # Occurs when parts of a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # are being streamed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] Represents a run step delta i.e. any changed fields on a run step during streami # # @param event [Symbol, :"thread.run.step.delta"] end @@ -115,7 +117,7 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. 
# - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -125,13 +127,14 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is completed. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.completed"] end @@ -140,7 +143,7 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -150,13 +153,14 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # fails. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. 
# # @param event [Symbol, :"thread.run.step.failed"] end @@ -165,7 +169,7 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -175,13 +179,14 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # is cancelled. # - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.cancelled"] end @@ -190,7 +195,7 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!attribute data # Represents a step in execution of a run. # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] required :data, -> { OpenAI::Beta::Threads::Runs::RunStep } # @!attribute event @@ -200,33 +205,20 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.step.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired} for more details. + # {OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired} for more + # details. # # Occurs when a # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) # expires. 
# - # @param data [OpenAI::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] Represents a step in execution of a run. # # @param event [Symbol, :"thread.run.step.expired"] end # @!method self.variants - # @return [Array(OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::RunStepStreamEvent::ThreadRunStepExpired - ) - end - end + # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 8c2d8801..2bfe1450 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -45,7 +45,7 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # Represents 
an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -55,12 +55,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunCreated} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated} for more details. # # Occurs when a new # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.created"] end @@ -70,7 +70,7 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -80,12 +80,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.queued") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunQueued} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `queued` status. 
# - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.queued"] end @@ -95,7 +95,7 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -105,12 +105,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunInProgress} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to an `in_progress` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.in_progress"] end @@ -120,7 +120,7 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -130,12 +130,13 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.requires_action") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction} for more + # details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `requires_action` status. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.requires_action"] end @@ -145,7 +146,7 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -155,12 +156,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunCompleted} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is completed. 
# - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.completed"] end @@ -170,7 +171,7 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -180,12 +181,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # ends with status `incomplete`. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.incomplete"] end @@ -195,7 +196,7 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -205,12 +206,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunFailed} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # fails. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.failed"] end @@ -220,7 +221,7 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -230,12 +231,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelling") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunCancelling} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # moves to a `cancelling` status. 
# - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelling"] end @@ -245,7 +246,7 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -255,12 +256,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.cancelled") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunCancelled} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # is cancelled. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.cancelled"] end @@ -270,7 +271,7 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). 
# - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] required :data, -> { OpenAI::Beta::Threads::Run } # @!attribute event @@ -280,35 +281,18 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, event: :"thread.run.expired") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::RunStreamEvent::ThreadRunExpired} for more details. + # {OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired} for more details. # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) # expires. # - # @param data [OpenAI::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r + # @param data [OpenAI::Models::Beta::Threads::Run] Represents an execution run on a [thread](https://platform.openai.com/docs/api-r # # @param event [Symbol, :"thread.run.expired"] end # @!method self.variants - # @return [Array(OpenAI::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Beta::RunStreamEvent::ThreadRunExpired)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::RunStreamEvent::ThreadRunCreated, - OpenAI::Beta::RunStreamEvent::ThreadRunQueued, - OpenAI::Beta::RunStreamEvent::ThreadRunInProgress, - OpenAI::Beta::RunStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::RunStreamEvent::ThreadRunCompleted, - OpenAI::Beta::RunStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::RunStreamEvent::ThreadRunFailed, - OpenAI::Beta::RunStreamEvent::ThreadRunCancelling, - OpenAI::Beta::RunStreamEvent::ThreadRunCancelled, - 
OpenAI::Beta::RunStreamEvent::ThreadRunExpired - ) - end - end + # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] end end end diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index 11d37b69..21ee9dd0 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -40,12 +40,12 @@ class Thread < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Beta::Thread::ToolResources, nil] + # @return [OpenAI::Models::Beta::Thread::ToolResources, nil] required :tool_resources, -> { OpenAI::Beta::Thread::ToolResources }, nil?: true # @!method initialize(id:, created_at:, metadata:, tool_resources:, object: :thread) - # Some parameter documentations has been truncated, see {OpenAI::Beta::Thread} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Thread} for more details. # # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). @@ -56,20 +56,20 @@ class Thread < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param tool_resources [OpenAI::Beta::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param object [Symbol, :thread] The object type, which is always `thread`. - # @see OpenAI::Beta::Thread#tool_resources + # @see OpenAI::Models::Beta::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::Thread::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::Thread::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::Thread::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::Thread::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -78,10 +78,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. 
# - # @param code_interpreter [OpenAI::Beta::Thread::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::Thread::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] - # @see OpenAI::Beta::Thread::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -93,12 +93,12 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Thread::ToolResources::CodeInterpreter} for more details. + # {OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter} for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::Thread::ToolResources#file_search + # @see OpenAI::Models::Beta::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -111,7 +111,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Thread::ToolResources::FileSearch} for more details. + # {OpenAI::Models::Beta::Thread::ToolResources::FileSearch} for more details. 
# # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index cbdac287..d244bcee 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -62,7 +62,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # model associated with the assistant. If not, the model associated with the # assistant will be used. # - # @return [String, Symbol, OpenAI::ChatModel, nil] + # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Model }, nil?: true # @!attribute parallel_tool_calls @@ -95,7 +95,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -110,7 +110,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. 
# - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, nil] optional :thread, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread } # @!attribute tool_choice @@ -122,7 +122,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tool_resources @@ -131,14 +131,14 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources }, nil?: true # @!attribute tools # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] @@ -159,8 +159,10 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
# - # @return [OpenAI::Beta::TruncationObject, nil] - optional :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + optional :truncation_strategy, + -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy }, + nil?: true # @!method initialize(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -176,25 +178,25 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. 
Higher values like 0.8 will m # - # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. 
Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -211,11 +213,7 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class Thread < OpenAI::Internal::Type::BaseModel @@ -223,7 +221,7 @@ class Thread < OpenAI::Internal::Type::BaseModel # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. # - # @return [Array, nil] + # @return [Array, nil] optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message] @@ -246,7 +244,7 @@ class Thread < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources @@ -255,22 +253,22 @@ class Thread < OpenAI::Internal::Type::BaseModel # @!method initialize(messages: nil, metadata: nil, tool_resources: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread} for more details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread} for more details. # # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. 
# - # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content } # @!attribute role @@ -281,13 +279,13 @@ class Message < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role] + # @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] required :role, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. 
# - # @return [Array, nil] + # @return [Array, nil] optional :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment] @@ -307,19 +305,20 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message} for more details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message} for more + # details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message#content + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#content module Content extend OpenAI::Internal::Type::Union @@ -327,27 +326,10 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). 
- variant -> { - OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray - } + variant -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = @@ -361,7 +343,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message#role + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -382,7 +364,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] @@ -391,7 +373,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
module Tool extend OpenAI::Internal::Type::Union @@ -417,31 +399,22 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch - ) - end - end + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] end end end - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread#tool_resources + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch @@ -453,10 +426,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. 
# - # @param code_interpreter [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -468,13 +441,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter} + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter} # for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -491,7 +464,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this thread. There can be a maximum of 1 vector # store attached to the thread. 
# - # @return [Array, nil] + # @return [Array, nil] optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] @@ -499,19 +472,19 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch} for - # more details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch} + # for more details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ # - # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, union: -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy @@ -538,10 +511,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore} + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore} # for more details. # - # @param chunking_strategy [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad # @@ -550,7 +523,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # - # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -584,7 +557,7 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, -> { OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -597,11 +570,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. 
- # @see OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. @@ -620,7 +593,7 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. 
@@ -630,16 +603,7 @@ class Static < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - end - end + # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end @@ -649,7 +613,7 @@ class Static < OpenAI::Internal::Type::BaseModel class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter @@ -657,7 +621,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute file_search # - # @return [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -666,10 +630,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # a list of file IDs, while the `file_search` tool requires a list of vector store # IDs. 
# - # @param code_interpreter [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] - # @see OpenAI::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -681,13 +645,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter} for - # more details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter} + # for more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::ThreadCreateAndRunParams::ToolResources#file_search + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The ID of the @@ -700,12 +664,58 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch} for more - # details. + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch} for + # more details. 
# # @param vector_store_ids [Array] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect end end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @return [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type } + + # @!attribute last_messages + # The number of most recent messages from the thread when constructing the context + # for the run. + # + # @return [Integer, nil] + optional :last_messages, Integer, nil?: true + + # @!method initialize(type:, last_messages: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy} for more + # details. + # + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # + # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. 
+ # + # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy#type + module Type + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + LAST_MESSAGES = :last_messages + + # @!method self.values + # @return [Array] + end + end end end end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index df5b80d4..7c5b41ef 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -12,7 +12,7 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. # - # @return [Array, nil] + # @return [Array, nil] optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::Message] } # @!attribute metadata @@ -32,18 +32,18 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::ThreadCreateParams::ToolResources }, nil?: true # @!method initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::ThreadCreateParams} for more details. # - # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param tool_resources [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -51,7 +51,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Beta::ThreadCreateParams::Message::Content } # @!attribute role @@ -62,13 +62,13 @@ class Message < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Beta::ThreadCreateParams::Message::Role] + # @return [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] required :role, enum: -> { OpenAI::Beta::ThreadCreateParams::Message::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::Message::Attachment] @@ -88,19 +88,19 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateParams::Message} for more details. + # {OpenAI::Models::Beta::ThreadCreateParams::Message} for more details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. 
# - # @param role [Symbol, OpenAI::Beta::ThreadCreateParams::Message::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Beta::ThreadCreateParams::Message#content + # @see OpenAI::Models::Beta::ThreadCreateParams::Message#content module Content extend OpenAI::Internal::Type::Union @@ -108,25 +108,10 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant -> { OpenAI::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray } + variant -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = @@ -140,7 +125,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. 
# - # @see OpenAI::Beta::ThreadCreateParams::Message#role + # @see OpenAI::Models::Beta::ThreadCreateParams::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -161,7 +146,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool] @@ -170,7 +155,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. module Tool extend OpenAI::Internal::Type::Union @@ -196,16 +181,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch - ) - end - end + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] end end end @@ -213,12 +189,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch, nil] + # @return 
[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -227,10 +203,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @param code_interpreter [OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] - # @see OpenAI::Beta::ThreadCreateParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -242,13 +218,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter} for + # more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::ThreadCreateParams::ToolResources#file_search + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -265,7 +241,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # with file_ids and attach it to this thread. 
There can be a maximum of 1 vector # store attached to the thread. # - # @return [Array, nil] + # @return [Array, nil] optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] @@ -273,18 +249,19 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil, vector_stores: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch} for more details. + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch} for more + # details. # # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ # - # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen + # @param vector_stores [Array] A helper to create a [vector store](https://platform.openai.com/docs/api-referen class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] + # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static, nil] optional :chunking_strategy, union: -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy @@ -311,10 +288,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore} for - # more details. + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore} + # for more details. # - # @param chunking_strategy [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to ad # @@ -323,7 +300,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# - # @see OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore#chunking_strategy module ChunkingStrategy extend OpenAI::Internal::Type::Union @@ -357,7 +334,7 @@ class Auto < OpenAI::Internal::Type::BaseModel class Static < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] required :static, -> { OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static @@ -370,11 +347,11 @@ class Static < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] # # @param type [Symbol, :static] Always `static`. - # @see OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static + # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel # @!attribute chunk_overlap_tokens # The number of tokens that overlap between chunks. The default value is `400`. 
@@ -393,7 +370,7 @@ class Static < OpenAI::Internal::Type::BaseModel # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} + # {OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static} # for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. @@ -403,16 +380,7 @@ class Static < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static - ) - end - end + # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index 1dbc9873..2af595d4 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -8,7 +8,7 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # Represents a thread that contains # [messages](https://platform.openai.com/docs/api-reference/messages). 
# - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] required :data, -> { OpenAI::Beta::Thread } # @!attribute event @@ -24,13 +24,13 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, enabled: nil, event: :"thread.created") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadStreamEvent} for more details. + # {OpenAI::Models::Beta::ThreadStreamEvent} for more details. # # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is # created. # - # @param data [OpenAI::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap + # @param data [OpenAI::Models::Beta::Thread] Represents a thread that contains [messages](https://platform.openai.com/docs/ap # # @param enabled [Boolean] Whether to enable input audio transcription. # diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 742aeb19..f2ddde6e 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -25,7 +25,7 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. # - # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] + # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources }, nil?: true # @!method initialize(metadata: nil, tool_resources: nil, request_options: {}) @@ -34,19 +34,19 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param tool_resources [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # - # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter } # @!attribute file_search # - # @return [OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] + # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch } # @!method initialize(code_interpreter: nil, file_search: nil) @@ -55,10 +55,10 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # `code_interpreter` tool requires a list of file IDs, while the `file_search` # tool requires a list of vector store IDs. 
# - # @param code_interpreter [OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] - # @param file_search [OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch] + # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] - # @see OpenAI::Beta::ThreadUpdateParams::ToolResources#code_interpreter + # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made @@ -70,13 +70,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(file_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter} for + # more details. # # @param file_ids [Array] A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made end - # @see OpenAI::Beta::ThreadUpdateParams::ToolResources#file_search + # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute vector_store_ids # The @@ -89,7 +89,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(vector_store_ids: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch} for more details. + # {OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch} for more + # details. 
# # @param vector_store_ids [Array] The [vector store](https://platform.openai.com/docs/api-reference/vector-stores/ end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 080262b3..bcb67b49 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -19,13 +19,7 @@ module Annotation variant :file_path, -> { OpenAI::Beta::Threads::FilePathAnnotation } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Beta::Threads::FileCitationAnnotation, OpenAI::Beta::Threads::FilePathAnnotation) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 2dbd5956..9eb54f5e 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -19,16 +19,7 @@ module AnnotationDelta variant :file_path, -> { OpenAI::Beta::Threads::FilePathDeltaAnnotation } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Beta::Threads::FilePathDeltaAnnotation)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Beta::Threads::FilePathDeltaAnnotation - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index bb8a4050..1d0a2a74 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb 
+++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -12,7 +12,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_citation # - # @return [OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation] + # @return [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] required :file_citation, -> { OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation } # @!attribute start_index @@ -39,7 +39,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_citation [OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] # # @param start_index [Integer] # @@ -47,7 +47,7 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_citation] Always `file_citation`. - # @see OpenAI::Beta::Threads::FileCitationAnnotation#file_citation + # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. 
diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 4449922e..db18b6e4 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -24,7 +24,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_citation # - # @return [OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] + # @return [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] optional :file_citation, -> { OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation } # @!attribute start_index @@ -47,7 +47,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_citation [OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] # # @param start_index [Integer] # @@ -55,7 +55,7 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_citation] Always `file_citation`. - # @see OpenAI::Beta::Threads::FileCitationDeltaAnnotation#file_citation + # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the specific File the citation is from. 
diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index 90055353..6c4e70c1 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -12,7 +12,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_path # - # @return [OpenAI::Beta::Threads::FilePathAnnotation::FilePath] + # @return [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] required :file_path, -> { OpenAI::Beta::Threads::FilePathAnnotation::FilePath } # @!attribute start_index @@ -38,7 +38,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_path [OpenAI::Beta::Threads::FilePathAnnotation::FilePath] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] # # @param start_index [Integer] # @@ -46,7 +46,7 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_path] Always `file_path`. - # @see OpenAI::Beta::Threads::FilePathAnnotation#file_path + # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that was generated. 
diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 659b9518..041f2a4a 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -24,7 +24,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @!attribute file_path # - # @return [OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] + # @return [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] optional :file_path, -> { OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath } # @!attribute start_index @@ -46,7 +46,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param end_index [Integer] # - # @param file_path [OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] # # @param start_index [Integer] # @@ -54,7 +54,7 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @param type [Symbol, :file_path] Always `file_path`. - # @see OpenAI::Beta::Threads::FilePathDeltaAnnotation#file_path + # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The ID of the file that was generated. diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index b71b6a5a..53cf02ed 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -17,21 +17,21 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
# - # @return [Symbol, OpenAI::Beta::Threads::ImageFile::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageFile::Detail } # @!method initialize(file_id:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageFile} for more details. + # {OpenAI::Models::Beta::Threads::ImageFile} for more details. # # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageFile::Detail] Specifies the detail level of the image if specified by the user. `low` uses few + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] Specifies the detail level of the image if specified by the user. `low` uses few # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # - # @see OpenAI::Beta::Threads::ImageFile#detail + # @see OpenAI::Models::Beta::Threads::ImageFile#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 09da28f8..2ae8fe56 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -7,7 +7,7 @@ module Threads class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_file # - # @return [OpenAI::Beta::Threads::ImageFile] + # @return [OpenAI::Models::Beta::Threads::ImageFile] required :image_file, -> { OpenAI::Beta::Threads::ImageFile } # @!attribute type @@ -20,7 +20,7 @@ class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. 
# - # @param image_file [OpenAI::Beta::Threads::ImageFile] + # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] # # @param type [Symbol, :image_file] Always `image_file`. end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 886ed307..b1d4c62e 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -9,7 +9,7 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # - # @return [Symbol, OpenAI::Beta::Threads::ImageFileDelta::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageFileDelta::Detail } # @!attribute file_id @@ -22,16 +22,16 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(detail: nil, file_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageFileDelta} for more details. + # {OpenAI::Models::Beta::Threads::ImageFileDelta} for more details. # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageFileDelta::Detail] Specifies the detail level of the image if specified by the user. `low` uses few + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] Specifies the detail level of the image if specified by the user. `low` uses few # # @param file_id [String] The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
# - # @see OpenAI::Beta::Threads::ImageFileDelta#detail + # @see OpenAI::Models::Beta::Threads::ImageFileDelta#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index 206e36dd..8657f912 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -19,7 +19,7 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_file # - # @return [OpenAI::Beta::Threads::ImageFileDelta, nil] + # @return [OpenAI::Models::Beta::Threads::ImageFileDelta, nil] optional :image_file, -> { OpenAI::Beta::Threads::ImageFileDelta } # @!method initialize(index:, image_file: nil, type: :image_file) @@ -28,7 +28,7 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # # @param index [Integer] The index of the content part in the message. # - # @param image_file [OpenAI::Beta::Threads::ImageFileDelta] + # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] # # @param type [Symbol, :image_file] Always `image_file`. end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index c932079d..a78260eb 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -16,21 +16,21 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` # - # @return [Symbol, OpenAI::Beta::Threads::ImageURL::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageURL::Detail } # @!method initialize(url:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageURL} for more details. 
+ # {OpenAI::Models::Beta::Threads::ImageURL} for more details. # # @param url [String] The external URL of the image, must be a supported image types: jpeg, jpg, png, # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageURL::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` # - # @see OpenAI::Beta::Threads::ImageURL#detail + # @see OpenAI::Models::Beta::Threads::ImageURL#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_url_content_block.rb b/lib/openai/models/beta/threads/image_url_content_block.rb index 52d35a06..d2f1a28d 100644 --- a/lib/openai/models/beta/threads/image_url_content_block.rb +++ b/lib/openai/models/beta/threads/image_url_content_block.rb @@ -7,7 +7,7 @@ module Threads class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Beta::Threads::ImageURL] + # @return [OpenAI::Models::Beta::Threads::ImageURL] required :image_url, -> { OpenAI::Beta::Threads::ImageURL } # @!attribute type @@ -19,7 +19,7 @@ class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # References an image URL in the content of a message. # - # @param image_url [OpenAI::Beta::Threads::ImageURL] + # @param image_url [OpenAI::Models::Beta::Threads::ImageURL] # # @param type [Symbol, :image_url] The type of the content part. 
end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index c8b10e43..43fba03f 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -9,7 +9,7 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. # - # @return [Symbol, OpenAI::Beta::Threads::ImageURLDelta::Detail, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Beta::Threads::ImageURLDelta::Detail } # @!attribute url @@ -21,16 +21,16 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(detail: nil, url: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::ImageURLDelta} for more details. + # {OpenAI::Models::Beta::Threads::ImageURLDelta} for more details. # - # @param detail [Symbol, OpenAI::Beta::Threads::ImageURLDelta::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # # @param url [String] The URL of the image, must be a supported image types: jpeg, jpg, png, gif, webp # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
# - # @see OpenAI::Beta::Threads::ImageURLDelta#detail + # @see OpenAI::Models::Beta::Threads::ImageURLDelta#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index efe44526..72079ef1 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -19,7 +19,7 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Beta::Threads::ImageURLDelta, nil] + # @return [OpenAI::Models::Beta::Threads::ImageURLDelta, nil] optional :image_url, -> { OpenAI::Beta::Threads::ImageURLDelta } # @!method initialize(index:, image_url: nil, type: :image_url) @@ -27,7 +27,7 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # # @param index [Integer] The index of the content part in the message. # - # @param image_url [OpenAI::Beta::Threads::ImageURLDelta] + # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta] # # @param type [Symbol, :image_url] Always `image_url`. end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index ad5cd0f2..d0053ec3 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -23,7 +23,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute attachments # A list of files attached to the message, and the tools they were added to. # - # @return [Array, nil] + # @return [Array, nil] required :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Message::Attachment] }, nil?: true @@ -37,7 +37,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the message in array of text and/or images. 
# - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContent] @@ -58,7 +58,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute incomplete_details # On an incomplete message, details about why the message is incomplete. # - # @return [OpenAI::Beta::Threads::Message::IncompleteDetails, nil] + # @return [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] required :incomplete_details, -> { OpenAI::Beta::Threads::Message::IncompleteDetails }, nil?: true # @!attribute metadata @@ -81,7 +81,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute role # The entity that produced the message. One of `user` or `assistant`. # - # @return [Symbol, OpenAI::Beta::Threads::Message::Role] + # @return [Symbol, OpenAI::Models::Beta::Threads::Message::Role] required :role, enum: -> { OpenAI::Beta::Threads::Message::Role } # @!attribute run_id @@ -96,7 +96,7 @@ class Message < OpenAI::Internal::Type::BaseModel # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. # - # @return [Symbol, OpenAI::Beta::Threads::Message::Status] + # @return [Symbol, OpenAI::Models::Beta::Threads::Message::Status] required :status, enum: -> { OpenAI::Beta::Threads::Message::Status } # @!attribute thread_id @@ -108,7 +108,7 @@ class Message < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, assistant_id:, attachments:, completed_at:, content:, created_at:, incomplete_at:, incomplete_details:, metadata:, role:, run_id:, status:, thread_id:, object: :"thread.message") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Message} for more details. + # {OpenAI::Models::Beta::Threads::Message} for more details. # # Represents a message within a # [thread](https://platform.openai.com/docs/api-reference/threads). 
@@ -117,25 +117,25 @@ class Message < OpenAI::Internal::Type::BaseModel # # @param assistant_id [String, nil] If applicable, the ID of the [assistant](https://platform.openai.com/docs/api-re # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they were added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they were added to. # # @param completed_at [Integer, nil] The Unix timestamp (in seconds) for when the message was completed. # - # @param content [Array] The content of the message in array of text and/or images. + # @param content [Array] The content of the message in array of text and/or images. # # @param created_at [Integer] The Unix timestamp (in seconds) for when the message was created. # # @param incomplete_at [Integer, nil] The Unix timestamp (in seconds) for when the message was marked as incomplete. # - # @param incomplete_details [OpenAI::Beta::Threads::Message::IncompleteDetails, nil] On an incomplete message, details about why the message is incomplete. + # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] On an incomplete message, details about why the message is incomplete. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param role [Symbol, OpenAI::Beta::Threads::Message::Role] The entity that produced the message. One of `user` or `assistant`. + # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] The entity that produced the message. One of `user` or `assistant`. 
# # @param run_id [String, nil] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) associa # - # @param status [Symbol, OpenAI::Beta::Threads::Message::Status] The status of the message, which can be either `in_progress`, `incomplete`, or ` + # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] The status of the message, which can be either `in_progress`, `incomplete`, or ` # # @param thread_id [String] The [thread](https://platform.openai.com/docs/api-reference/threads) ID that thi # @@ -151,7 +151,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Message::Attachment::Tool] @@ -160,7 +160,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
module Tool extend OpenAI::Internal::Type::Union @@ -183,35 +183,26 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) - end - end + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] end end - # @see OpenAI::Beta::Threads::Message#incomplete_details + # @see OpenAI::Models::Beta::Threads::Message#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason the message is incomplete. # - # @return [Symbol, OpenAI::Beta::Threads::Message::IncompleteDetails::Reason] + # @return [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] required :reason, enum: -> { OpenAI::Beta::Threads::Message::IncompleteDetails::Reason } # @!method initialize(reason:) # On an incomplete message, details about why the message is incomplete. # - # @param reason [Symbol, OpenAI::Beta::Threads::Message::IncompleteDetails::Reason] The reason the message is incomplete. + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] The reason the message is incomplete. # The reason the message is incomplete. # - # @see OpenAI::Beta::Threads::Message::IncompleteDetails#reason + # @see OpenAI::Models::Beta::Threads::Message::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -228,7 +219,7 @@ module Reason # The entity that produced the message. One of `user` or `assistant`. 
# - # @see OpenAI::Beta::Threads::Message#role + # @see OpenAI::Models::Beta::Threads::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -242,7 +233,7 @@ module Role # The status of the message, which can be either `in_progress`, `incomplete`, or # `completed`. # - # @see OpenAI::Beta::Threads::Message#status + # @see OpenAI::Models::Beta::Threads::Message#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index f40e35dd..f0771098 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -24,18 +24,7 @@ module MessageContent variant :refusal, -> { OpenAI::Beta::Threads::RefusalContentBlock } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlock, OpenAI::Beta::Threads::RefusalContentBlock)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlock, - OpenAI::Beta::Threads::RefusalContentBlock - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index 80ecb9cb..908eb4d7 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -24,18 +24,7 @@ module MessageContentDelta variant :image_url, -> { OpenAI::Beta::Threads::ImageURLDeltaBlock } # @!method self.variants - # @return 
[Array(OpenAI::Beta::Threads::ImageFileDeltaBlock, OpenAI::Beta::Threads::TextDeltaBlock, OpenAI::Beta::Threads::RefusalDeltaBlock, OpenAI::Beta::Threads::ImageURLDeltaBlock)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Beta::Threads::TextDeltaBlock, - OpenAI::Beta::Threads::RefusalDeltaBlock, - OpenAI::Beta::Threads::ImageURLDeltaBlock - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index 65e1a5b3..254bd67f 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -21,17 +21,7 @@ module MessageContentPartParam variant :text, -> { OpenAI::Beta::Threads::TextContentBlockParam } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::ImageFileContentBlock, OpenAI::Beta::Threads::ImageURLContentBlock, OpenAI::Beta::Threads::TextContentBlockParam)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 11c3ea83..25a63182 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -12,7 +12,7 @@ class 
MessageCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Beta::Threads::MessageCreateParams::Content } # @!attribute role @@ -23,13 +23,13 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] + # @return [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] required :role, enum: -> { OpenAI::Beta::Threads::MessageCreateParams::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::MessageCreateParams::Attachment] @@ -51,11 +51,11 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Beta::Threads::MessageCreateParams} for more details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. 
# # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -69,25 +69,10 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). - variant -> { OpenAI::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } + variant -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = @@ -120,7 +105,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool] @@ -129,7 +114,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
module Tool extend OpenAI::Internal::Type::Union @@ -155,16 +140,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch - ) - end - end + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] end end end diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 831356c3..8d151261 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -8,26 +8,26 @@ class MessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the message in array of text and/or images. # - # @return [Array, nil] + # @return [Array, nil] optional :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::MessageContentDelta] } # @!attribute role # The entity that produced the message. One of `user` or `assistant`. # - # @return [Symbol, OpenAI::Beta::Threads::MessageDelta::Role, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role, nil] optional :role, enum: -> { OpenAI::Beta::Threads::MessageDelta::Role } # @!method initialize(content: nil, role: nil) # The delta containing the fields that have changed on the Message. # - # @param content [Array] The content of the message in array of text and/or images. + # @param content [Array] The content of the message in array of text and/or images. # - # @param role [Symbol, OpenAI::Beta::Threads::MessageDelta::Role] The entity that produced the message. One of `user` or `assistant`. 
+ # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] The entity that produced the message. One of `user` or `assistant`. # The entity that produced the message. One of `user` or `assistant`. # - # @see OpenAI::Beta::Threads::MessageDelta#role + # @see OpenAI::Models::Beta::Threads::MessageDelta#role module Role extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb index 510cd5cf..14190d67 100644 --- a/lib/openai/models/beta/threads/message_delta_event.rb +++ b/lib/openai/models/beta/threads/message_delta_event.rb @@ -14,7 +14,7 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The delta containing the fields that have changed on the Message. # - # @return [OpenAI::Beta::Threads::MessageDelta] + # @return [OpenAI::Models::Beta::Threads::MessageDelta] required :delta, -> { OpenAI::Beta::Threads::MessageDelta } # @!attribute object @@ -29,7 +29,7 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param id [String] The identifier of the message, which can be referenced in API endpoints. # - # @param delta [OpenAI::Beta::Threads::MessageDelta] The delta containing the fields that have changed on the Message. + # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] The delta containing the fields that have changed on the Message. # # @param object [Symbol, :"thread.message.delta"] The object type, which is always `thread.message.delta`. end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 6ffe6655..1358425b 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -38,7 +38,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Beta::Threads::MessageListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::Threads::MessageListParams::Order } # @!attribute run_id @@ -57,7 +57,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param run_id [String] Filter messages by the run ID that generated them. # diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index 66fbe931..7f1eee07 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -17,7 +17,7 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The function definition. # - # @return [OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function] + # @return [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] required :function, -> { OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function } # @!attribute type @@ -29,17 +29,18 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RequiredActionFunctionToolCall} for more details. 
+ # {OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall} for more + # details. # # Tool call objects # # @param id [String] The ID of the tool call. This ID must be referenced when you submit the tool out # - # @param function [OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function] The function definition. + # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] The function definition. # # @param type [Symbol, :function] The type of tool call the output is required for. For now, this is always `funct - # @see OpenAI::Beta::Threads::RequiredActionFunctionToolCall#function + # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments that the model expects you to pass to the function. diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index 5ee65dfe..2533a7a1 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -56,7 +56,7 @@ class Run < OpenAI::Internal::Type::BaseModel # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. # - # @return [OpenAI::Beta::Threads::Run::IncompleteDetails, nil] + # @return [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] required :incomplete_details, -> { OpenAI::Beta::Threads::Run::IncompleteDetails }, nil?: true # @!attribute instructions @@ -70,7 +70,7 @@ class Run < OpenAI::Internal::Type::BaseModel # @!attribute last_error # The last error associated with this run. Will be `null` if there are no errors. 
# - # @return [OpenAI::Beta::Threads::Run::LastError, nil] + # @return [OpenAI::Models::Beta::Threads::Run::LastError, nil] required :last_error, -> { OpenAI::Beta::Threads::Run::LastError }, nil?: true # @!attribute max_completion_tokens @@ -124,7 +124,7 @@ class Run < OpenAI::Internal::Type::BaseModel # Details on the action required to continue the run. Will be `null` if no action # is required. # - # @return [OpenAI::Beta::Threads::Run::RequiredAction, nil] + # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] required :required_action, -> { OpenAI::Beta::Threads::Run::RequiredAction }, nil?: true # @!attribute response_format @@ -149,7 +149,7 @@ class Run < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] required :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute started_at @@ -163,7 +163,7 @@ class Run < OpenAI::Internal::Type::BaseModel # `requires_action`, `cancelling`, `cancelled`, `failed`, `completed`, # `incomplete`, or `expired`. # - # @return [Symbol, OpenAI::Beta::Threads::RunStatus] + # @return [Symbol, OpenAI::Models::Beta::Threads::RunStatus] required :status, enum: -> { OpenAI::Beta::Threads::RunStatus } # @!attribute thread_id @@ -182,7 +182,7 @@ class Run < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. 
# - # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] required :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tools @@ -190,21 +190,21 @@ class Run < OpenAI::Internal::Type::BaseModel # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # this run. # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] } # @!attribute truncation_strategy # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. # - # @return [OpenAI::Beta::TruncationObject, nil] - required :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true + # @return [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] + required :truncation_strategy, -> { OpenAI::Beta::Threads::Run::TruncationStrategy }, nil?: true # @!attribute usage # Usage statistics related to the run. This value will be `null` if the run is not # in a terminal state (i.e. `in_progress`, `queued`, etc.). 
# - # @return [OpenAI::Beta::Threads::Run::Usage, nil] + # @return [OpenAI::Models::Beta::Threads::Run::Usage, nil] required :usage, -> { OpenAI::Beta::Threads::Run::Usage }, nil?: true # @!attribute temperature @@ -221,7 +221,7 @@ class Run < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expires_at:, failed_at:, incomplete_details:, instructions:, last_error:, max_completion_tokens:, max_prompt_tokens:, metadata:, model:, parallel_tool_calls:, required_action:, response_format:, started_at:, status:, thread_id:, tool_choice:, tools:, truncation_strategy:, usage:, temperature: nil, top_p: nil, object: :"thread.run") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Run} for more details. + # {OpenAI::Models::Beta::Threads::Run} for more details. # # Represents an execution run on a # [thread](https://platform.openai.com/docs/api-reference/threads). @@ -240,11 +240,11 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run failed. # - # @param incomplete_details [OpenAI::Beta::Threads::Run::IncompleteDetails, nil] Details on why the run is incomplete. Will be `null` if the run is not incomplet + # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] Details on why the run is incomplete. Will be `null` if the run is not incomplet # # @param instructions [String] The instructions that the [assistant](https://platform.openai.com/docs/api-refer # - # @param last_error [OpenAI::Beta::Threads::Run::LastError, nil] The last error associated with this run. Will be `null` if there are no errors. + # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] The last error associated with this run. Will be `null` if there are no errors. 
# # @param max_completion_tokens [Integer, nil] The maximum number of completion tokens specified to have been used over the cou # @@ -256,23 +256,23 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param required_action [OpenAI::Beta::Threads::Run::RequiredAction, nil] Details on the action required to continue the run. Will be `null` if no action + # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] Details on the action required to continue the run. Will be `null` if no action # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param started_at [Integer, nil] The Unix timestamp (in seconds) for when the run was started. # - # @param status [Symbol, OpenAI::Beta::Threads::RunStatus] The status of the run, which can be either `queued`, `in_progress`, `requires_ac + # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] The status of the run, which can be either `queued`, `in_progress`, `requires_ac # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. 
# - # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe + # @param tools [Array] The list of tools that the [assistant](https://platform.openai.com/docs/api-refe # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # - # @param usage [OpenAI::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not + # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] Usage statistics related to the run. This value will be `null` if the run is not # # @param temperature [Float, nil] The sampling temperature used for this run. If not set, defaults to 1. # @@ -280,28 +280,28 @@ class Run < OpenAI::Internal::Type::BaseModel # # @param object [Symbol, :"thread.run"] The object type, which is always `thread.run`. - # @see OpenAI::Beta::Threads::Run#incomplete_details + # @see OpenAI::Models::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. # - # @return [Symbol, OpenAI::Beta::Threads::Run::IncompleteDetails::Reason, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Beta::Threads::Run::IncompleteDetails::Reason } # @!method initialize(reason: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Run::IncompleteDetails} for more details. + # {OpenAI::Models::Beta::Threads::Run::IncompleteDetails} for more details. # # Details on why the run is incomplete. 
Will be `null` if the run is not # incomplete. # - # @param reason [Symbol, OpenAI::Beta::Threads::Run::IncompleteDetails::Reason] The reason why the run is incomplete. This will point to which specific token li + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] The reason why the run is incomplete. This will point to which specific token li # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. # - # @see OpenAI::Beta::Threads::Run::IncompleteDetails#reason + # @see OpenAI::Models::Beta::Threads::Run::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -313,12 +313,12 @@ module Reason end end - # @see OpenAI::Beta::Threads::Run#last_error + # @see OpenAI::Models::Beta::Threads::Run#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # - # @return [Symbol, OpenAI::Beta::Threads::Run::LastError::Code] + # @return [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] required :code, enum: -> { OpenAI::Beta::Threads::Run::LastError::Code } # @!attribute message @@ -330,13 +330,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:) # The last error associated with this run. Will be `null` if there are no errors. # - # @param code [Symbol, OpenAI::Beta::Threads::Run::LastError::Code] One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. + # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. # # @param message [String] A human-readable description of the error. # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. 
# - # @see OpenAI::Beta::Threads::Run::LastError#code + # @see OpenAI::Models::Beta::Threads::Run::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -349,12 +349,12 @@ module Code end end - # @see OpenAI::Beta::Threads::Run#required_action + # @see OpenAI::Models::Beta::Threads::Run#required_action class RequiredAction < OpenAI::Internal::Type::BaseModel # @!attribute submit_tool_outputs # Details on the tool outputs needed for this run to continue. # - # @return [OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] + # @return [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] required :submit_tool_outputs, -> { OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs @@ -370,16 +370,16 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel # Details on the action required to continue the run. Will be `null` if no action # is required. # - # @param submit_tool_outputs [OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] Details on the tool outputs needed for this run to continue. + # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] Details on the tool outputs needed for this run to continue. # # @param type [Symbol, :submit_tool_outputs] For now, this is always `submit_tool_outputs`. - # @see OpenAI::Beta::Threads::Run::RequiredAction#submit_tool_outputs + # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # A list of the relevant tool calls. # - # @return [Array] + # @return [Array] required :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] @@ -388,11 +388,57 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel # @!method initialize(tool_calls:) # Details on the tool outputs needed for this run to continue. # - # @param tool_calls [Array] A list of the relevant tool calls. 
+ # @param tool_calls [Array] A list of the relevant tool calls. end end - # @see OpenAI::Beta::Threads::Run#usage + # @see OpenAI::Models::Beta::Threads::Run#truncation_strategy + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @return [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::Threads::Run::TruncationStrategy::Type } + + # @!attribute last_messages + # The number of most recent messages from the thread when constructing the context + # for the run. + # + # @return [Integer, nil] + optional :last_messages, Integer, nil?: true + + # @!method initialize(type:, last_messages: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::Run::TruncationStrategy} for more details. + # + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # + # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. 
+ # + # @see OpenAI::Models::Beta::Threads::Run::TruncationStrategy#type + module Type + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + LAST_MESSAGES = :last_messages + + # @!method self.values + # @return [Array] + end + end + + # @see OpenAI::Models::Beta::Threads::Run#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run. diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 17ab91c4..9efd3f28 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -28,7 +28,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } @@ -43,7 +43,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute additional_messages # Adds additional messages to the thread before creating the run. # - # @return [Array, nil] + # @return [Array, nil] optional :additional_messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage] @@ -95,7 +95,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # model associated with the assistant. If not, the model associated with the # assistant will be used. # - # @return [String, Symbol, OpenAI::ChatModel, nil] + # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Beta::Threads::RunCreateParams::Model }, nil?: true # @!attribute parallel_tool_calls @@ -114,7 +114,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. 
Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -139,7 +139,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # indicates the generation exceeded `max_tokens` or the conversation exceeded the # max context length. # - # @return [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] + # @return [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] optional :response_format, union: -> { OpenAI::Beta::AssistantResponseFormatOption }, nil?: true # @!attribute temperature @@ -159,14 +159,14 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # `{"type": "function", "function": {"name": "my_function"}}` forces the model to # call that tool. # - # @return [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] + # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Beta::AssistantToolChoiceOption }, nil?: true # @!attribute tools # Override the tools the assistant can use for this run. This is useful for # modifying the behavior on a per-run basis. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool] @@ -187,8 +187,10 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
# - # @return [OpenAI::Beta::TruncationObject, nil] - optional :truncation_strategy, -> { OpenAI::Beta::TruncationObject }, nil?: true + # @return [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + optional :truncation_strategy, + -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy }, + nil?: true # @!method initialize(assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) # Some parameter documentations has been truncated, see @@ -196,11 +198,11 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @param assistant_id [String] The ID of the [assistant](https://platform.openai.com/docs/api-reference/assista # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo # # @param additional_instructions [String, nil] Appends additional instructions at the end of the instructions for the run. This # - # @param additional_messages [Array, nil] Adds additional messages to the thread before creating the run. + # @param additional_messages [Array, nil] Adds additional messages to the thread before creating the run. # # @param instructions [String, nil] Overrides the [instructions](https://platform.openai.com/docs/api-reference/assi # @@ -210,23 +212,23 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. 
This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -234,7 +236,7 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The text contents of the message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content @@ -248,13 +250,13 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @return [Symbol, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role] + # @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] required :role, enum: -> { OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role } # @!attribute attachments # A list of files attached to the message, and the tools they should be added to. # - # @return [Array, nil] + # @return [Array, nil] optional :attachments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment] @@ -274,19 +276,20 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, role:, attachments: nil, metadata: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage} for more details. 
+ # {OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage} for more + # details. # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. # - # @param role [Symbol, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # The text contents of the message. # - # @see OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage#content + # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#content module Content extend OpenAI::Internal::Type::Union @@ -294,27 +297,10 @@ module Content variant String # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). 
- variant -> { - OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray - } + variant -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) - end - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] MessageContentPartParamArray = @@ -328,7 +314,7 @@ module Content # - `assistant`: Indicates the message is generated by the assistant. Use this # value to insert messages from the assistant into the conversation. # - # @see OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage#role + # @see OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage#role module Role extend OpenAI::Internal::Type::Enum @@ -349,7 +335,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute tools # The tools to add this file to. # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] @@ -358,7 +344,7 @@ class Attachment < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] The ID of the file to attach to the message. # - # @param tools [Array] The tools to add this file to. + # @param tools [Array] The tools to add this file to. 
module Tool extend OpenAI::Internal::Type::Union @@ -384,16 +370,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::CodeInterpreterTool, OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch - ) - end - end + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] end end end @@ -411,10 +388,52 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] + end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @return [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] + required :type, enum: -> { OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type } + + # @!attribute last_messages + # The number of most recent messages from the thread when constructing the context + # for the run. + # + # @return [Integer, nil] + optional :last_messages, Integer, nil?: true + + # @!method initialize(type:, last_messages: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy} for more + # details. + # + # Controls for how a thread will be truncated prior to the run. 
Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] The truncation strategy to use for the thread. The default is `auto`. If set to + # + # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + # + # @see OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy#type + module Type + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + LAST_MESSAGES = :last_messages - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 0399613f..5f50a4bb 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -38,7 +38,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Beta::Threads::RunListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::Threads::RunListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) @@ -51,7 +51,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. 
Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 85f8acff..d37572fd 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -19,7 +19,7 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!attribute tool_outputs # A list of tools for which the outputs are being submitted. # - # @return [Array] + # @return [Array] required :tool_outputs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] @@ -28,7 +28,7 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @!method initialize(thread_id:, tool_outputs:, request_options: {}) # @param thread_id [String] # - # @param tool_outputs [Array] A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -48,7 +48,7 @@ class ToolOutput < OpenAI::Internal::Type::BaseModel # @!method initialize(output: nil, tool_call_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput} for more + # {OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput} for more # details. # # @param output [String] The output of the tool call to be submitted to continue the run. 
diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index 8ef2e615..b00adb80 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -20,17 +20,17 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @!attribute image # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] optional :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } # @!method initialize(index:, image: nil, type: :image) # @param index [Integer] The index of the output in the outputs array. # - # @param image [OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] # # @param type [Symbol, :image] Always `image`. - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage#image + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -41,8 +41,8 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} for more - # details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image} for + # more details. 
# # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 5d03c473..708b96f8 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -15,7 +15,7 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # The Code Interpreter tool call definition. # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] required :code_interpreter, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter @@ -30,17 +30,17 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, code_interpreter:, type: :code_interpreter) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall} for more details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall} for more details. # # Details of the Code Interpreter tool call the run step was involved in. # # @param id [String] The ID of the tool call. # - # @param code_interpreter [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] The Code Interpreter tool call definition. + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] The Code Interpreter tool call definition. # # @param type [Symbol, :code_interpreter] The type of tool call. 
This is always going to be `code_interpreter` for this ty - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute input # The input to the Code Interpreter tool call. @@ -53,7 +53,7 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. # - # @return [Array] + # @return [Array] required :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] @@ -61,14 +61,14 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, outputs:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter} for more - # details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter} + # for more details. # # The Code Interpreter tool call definition. # # @param input [String] The input to the Code Interpreter tool call. # - # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one + # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one # Text output from the Code Interpreter tool call as part of a run step. 
module Output @@ -111,7 +111,7 @@ class Logs < OpenAI::Internal::Type::BaseModel class Image < OpenAI::Internal::Type::BaseModel # @!attribute image # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] required :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image @@ -124,11 +124,11 @@ class Image < OpenAI::Internal::Type::BaseModel required :type, const: :image # @!method initialize(image:, type: :image) - # @param image [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] # # @param type [Symbol, :image] Always `image`. - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image class Image < OpenAI::Internal::Type::BaseModel # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the @@ -139,7 +139,7 @@ class Image < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image} + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image} # for more details. 
# # @param file_id [String] The [file](https://platform.openai.com/docs/api-reference/files) ID of the image @@ -147,16 +147,7 @@ class Image < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 6ce54421..b123ac62 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -28,13 +28,14 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute code_interpreter # The Code Interpreter tool call definition. # - # @return [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta} for more details. 
+ # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta} for more + # details. # # Details of the Code Interpreter tool call the run step was involved in. # @@ -42,11 +43,11 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # # @param id [String] The ID of the tool call. # - # @param code_interpreter [OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] The Code Interpreter tool call definition. + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] The Code Interpreter tool call definition. # # @param type [Symbol, :code_interpreter] The type of tool call. This is always going to be `code_interpreter` for this ty - # @see OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter + # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!attribute input # The input to the Code Interpreter tool call. @@ -59,7 +60,7 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. # - # @return [Array, nil] + # @return [Array, nil] optional :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] @@ -67,14 +68,14 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @!method initialize(input: nil, outputs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter} for - # more details. + # {OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter} + # for more details. # # The Code Interpreter tool call definition. # # @param input [String] The input to the Code Interpreter tool call. 
# - # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one + # @param outputs [Array] The outputs from the Code Interpreter tool call. Code Interpreter can output one # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -88,16 +89,7 @@ module Output variant :image, -> { OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index b8d0d149..68036d67 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -15,7 +15,7 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute file_search # For now, this is always going to be an empty object. # - # @return [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch] + # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] required :file_search, -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch } # @!attribute type @@ -27,27 +27,27 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, file_search:, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCall} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall} for more details. 
# # @param id [String] The ID of the tool call object. # - # @param file_search [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch] For now, this is always going to be an empty object. + # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] For now, this is always going to be an empty object. # # @param type [Symbol, :file_search] The type of tool call. This is always going to be `file_search` for this type of - # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall#file_search + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # The ranking options for the file search. # - # @return [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions } # @!attribute results # The results of the file search. # - # @return [Array, nil] + # @return [Array, nil] optional :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] @@ -56,17 +56,17 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(ranking_options: nil, results: nil) # For now, this is always going to be an empty object. # - # @param ranking_options [OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] The ranking options for the file search. + # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] The ranking options for the file search. # - # @param results [Array] The results of the file search. + # @param results [Array] The results of the file search. 
- # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` # ranker. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] required :ranker, enum: -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker @@ -81,19 +81,19 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker:, score_threshold:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions} + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions} # for more details. # # The ranking options for the file search. # - # @param ranker [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank + # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] The ranker to use for the file search. If not specified will use the `auto` rank # # @param score_threshold [Float] The score threshold for the file search. All values must be a floating point num # The ranker to use for the file search. If not specified will use the `auto` # ranker. 
# - # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum @@ -129,7 +129,7 @@ class Result < OpenAI::Internal::Type::BaseModel # The content of the result that was found. The content is only included if # requested via the include query parameter. # - # @return [Array, nil] + # @return [Array, nil] optional :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] @@ -137,8 +137,8 @@ class Result < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, file_name:, score:, content: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result} for more - # details. + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result} + # for more details. # # A result instance of the file search. # @@ -148,7 +148,7 @@ class Result < OpenAI::Internal::Type::BaseModel # # @param score [Float] The score of the result. All values must be a floating point number between 0 an # - # @param content [Array] The content of the result that was found. The content is only included if reques + # @param content [Array] The content of the result that was found. The content is only included if reques class Content < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -160,7 +160,7 @@ class Content < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of the content. 
# - # @return [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] optional :type, enum: -> { OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type @@ -169,11 +169,11 @@ class Content < OpenAI::Internal::Type::BaseModel # @!method initialize(text: nil, type: nil) # @param text [String] The text content of the file. # - # @param type [Symbol, OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] The type of the content. + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] The type of the content. # The type of the content. # - # @see OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type + # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index a0896ee1..13c9f547 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -33,7 +33,7 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(file_search:, index:, id: nil, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta} for more details. # # @param file_search [Object] For now, this is always going to be an empty object. 
# diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index ce1e3ad3..de633613 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -15,7 +15,7 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The definition of the function that was called. # - # @return [OpenAI::Beta::Threads::Runs::FunctionToolCall::Function] + # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] required :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCall::Function } # @!attribute type @@ -27,15 +27,15 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCall} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall} for more details. # # @param id [String] The ID of the tool call object. # - # @param function [OpenAI::Beta::Threads::Runs::FunctionToolCall::Function] The definition of the function that was called. + # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] The definition of the function that was called. # # @param type [Symbol, :function] The type of tool call. This is always going to be `function` for this type of to - # @see OpenAI::Beta::Threads::Runs::FunctionToolCall#function + # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments passed to the function. @@ -59,7 +59,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:, output:) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCall::Function} for more details. 
+ # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function} for more + # details. # # The definition of the function that was called. # diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index 9dc353ce..ad8391bb 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -28,22 +28,22 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @!attribute function # The definition of the function that was called. # - # @return [OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] optional :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function } # @!method initialize(index:, id: nil, function: nil, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCallDelta} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta} for more details. # # @param index [Integer] The index of the tool call in the tool calls array. # # @param id [String] The ID of the tool call object. # - # @param function [OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function] The definition of the function that was called. + # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] The definition of the function that was called. # # @param type [Symbol, :function] The type of tool call. This is always going to be `function` for this type of to - # @see OpenAI::Beta::Threads::Runs::FunctionToolCallDelta#function + # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments passed to the function. 
@@ -67,7 +67,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil, output: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function} for more details. + # {OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function} for more + # details. # # The definition of the function that was called. # diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb index f39e253b..0b5b6ac9 100644 --- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb +++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb @@ -8,7 +8,7 @@ module Runs class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # - # @return [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @return [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] required :message_creation, -> { OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation } @@ -21,11 +21,11 @@ class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(message_creation:, type: :message_creation) # Details of the message creation by the run step. # - # @param message_creation [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] # # @param type [Symbol, :message_creation] Always `message_creation`. - # @see OpenAI::Beta::Threads::Runs::MessageCreationStepDetails#message_creation + # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. 
diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index 656fc313..fde3abeb 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -56,7 +56,7 @@ class RunStep < OpenAI::Internal::Type::BaseModel # The last error associated with this run step. Will be `null` if there are no # errors. # - # @return [OpenAI::Beta::Threads::Runs::RunStep::LastError, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] required :last_error, -> { OpenAI::Beta::Threads::Runs::RunStep::LastError }, nil?: true # @!attribute metadata @@ -87,13 +87,13 @@ class RunStep < OpenAI::Internal::Type::BaseModel # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Status] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] required :status, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::Status } # @!attribute step_details # The details of the run step. # - # @return [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails] + # @return [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] required :step_details, union: -> { OpenAI::Beta::Threads::Runs::RunStep::StepDetails } # @!attribute thread_id @@ -106,19 +106,19 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of run step, which can be either `message_creation` or `tool_calls`. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Type] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] required :type, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::Type } # @!attribute usage # Usage statistics related to the run step. 
This value will be `null` while the # run step's status is `in_progress`. # - # @return [OpenAI::Beta::Threads::Runs::RunStep::Usage, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] required :usage, -> { OpenAI::Beta::Threads::Runs::RunStep::Usage }, nil?: true # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expired_at:, failed_at:, last_error:, metadata:, run_id:, status:, step_details:, thread_id:, type:, usage:, object: :"thread.run.step") # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::RunStep} for more details. + # {OpenAI::Models::Beta::Threads::Runs::RunStep} for more details. # # Represents a step in execution of a run. # @@ -136,30 +136,30 @@ class RunStep < OpenAI::Internal::Type::BaseModel # # @param failed_at [Integer, nil] The Unix timestamp (in seconds) for when the run step failed. # - # @param last_error [OpenAI::Beta::Threads::Runs::RunStep::LastError, nil] The last error associated with this run step. Will be `null` if there are no err + # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] The last error associated with this run step. Will be `null` if there are no err # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param run_id [String] The ID of the [run](https://platform.openai.com/docs/api-reference/runs) that th # - # @param status [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Status] The status of the run step, which can be either `in_progress`, `cancelled`, `fai + # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] The status of the run step, which can be either `in_progress`, `cancelled`, `fai # - # @param step_details [OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails] The details of the run step. 
+ # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] The details of the run step. # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t # - # @param type [Symbol, OpenAI::Beta::Threads::Runs::RunStep::Type] The type of run step, which can be either `message_creation` or `tool_calls`. + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] The type of run step, which can be either `message_creation` or `tool_calls`. # - # @param usage [OpenAI::Beta::Threads::Runs::RunStep::Usage, nil] Usage statistics related to the run step. This value will be `null` while the ru + # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] Usage statistics related to the run step. This value will be `null` while the ru # # @param object [Symbol, :"thread.run.step"] The object type, which is always `thread.run.step`. - # @see OpenAI::Beta::Threads::Runs::RunStep#last_error + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::RunStep::LastError::Code] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] required :code, enum: -> { OpenAI::Beta::Threads::Runs::RunStep::LastError::Code } # @!attribute message @@ -172,13 +172,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # The last error associated with this run step. Will be `null` if there are no # errors. # - # @param code [Symbol, OpenAI::Beta::Threads::Runs::RunStep::LastError::Code] One of `server_error` or `rate_limit_exceeded`. + # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] One of `server_error` or `rate_limit_exceeded`. 
# # @param message [String] A human-readable description of the error. # One of `server_error` or `rate_limit_exceeded`. # - # @see OpenAI::Beta::Threads::Runs::RunStep::LastError#code + # @see OpenAI::Models::Beta::Threads::Runs::RunStep::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -193,7 +193,7 @@ module Code # The status of the run step, which can be either `in_progress`, `cancelled`, # `failed`, `completed`, or `expired`. # - # @see OpenAI::Beta::Threads::Runs::RunStep#status + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#status module Status extend OpenAI::Internal::Type::Enum @@ -209,7 +209,7 @@ module Status # The details of the run step. # - # @see OpenAI::Beta::Threads::Runs::RunStep#step_details + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#step_details module StepDetails extend OpenAI::Internal::Type::Union @@ -222,21 +222,12 @@ module StepDetails variant :tool_calls, -> { OpenAI::Beta::Threads::Runs::ToolCallsStepDetails } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Beta::Threads::Runs::ToolCallsStepDetails)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Beta::Threads::Runs::ToolCallsStepDetails - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] end # The type of run step, which can be either `message_creation` or `tool_calls`. 
# - # @see OpenAI::Beta::Threads::Runs::RunStep#type + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#type module Type extend OpenAI::Internal::Type::Enum @@ -247,7 +238,7 @@ module Type # @return [Array] end - # @see OpenAI::Beta::Threads::Runs::RunStep#usage + # @see OpenAI::Models::Beta::Threads::Runs::RunStep#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens # Number of completion tokens used over the course of the run step. diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index a4848a8c..3dd76094 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -9,17 +9,17 @@ class RunStepDelta < OpenAI::Internal::Type::BaseModel # @!attribute step_details # The details of the run step. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject, nil] optional :step_details, union: -> { OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails } # @!method initialize(step_details: nil) # The delta containing the fields that have changed on the run step. # - # @param step_details [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject] The details of the run step. + # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] The details of the run step. # The details of the run step. 
# - # @see OpenAI::Beta::Threads::Runs::RunStepDelta#step_details + # @see OpenAI::Models::Beta::Threads::Runs::RunStepDelta#step_details module StepDetails extend OpenAI::Internal::Type::Union @@ -32,16 +32,7 @@ module StepDetails variant :tool_calls, -> { OpenAI::Beta::Threads::Runs::ToolCallDeltaObject } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Beta::Threads::Runs::ToolCallDeltaObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Beta::Threads::Runs::ToolCallDeltaObject - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index 19c633c5..abca9d4b 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -15,7 +15,7 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # @!attribute delta # The delta containing the fields that have changed on the run step. # - # @return [OpenAI::Beta::Threads::Runs::RunStepDelta] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] required :delta, -> { OpenAI::Beta::Threads::Runs::RunStepDelta } # @!attribute object @@ -30,7 +30,7 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param id [String] The identifier of the run step, which can be referenced in API endpoints. # - # @param delta [OpenAI::Beta::Threads::Runs::RunStepDelta] The delta containing the fields that have changed on the run step. + # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] The delta containing the fields that have changed on the run step. 
# # @param object [Symbol, :"thread.run.step.delta"] The object type, which is always `thread.run.step.delta`. end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 1c617d09..5cc90d66 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -14,7 +14,7 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @!attribute message_creation # - # @return [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] optional :message_creation, -> { OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation @@ -23,11 +23,11 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @!method initialize(message_creation: nil, type: :message_creation) # Details of the message creation by the run step. # - # @param message_creation [OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] # # @param type [Symbol, :message_creation] Always `message_creation`. - # @see OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation + # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel # @!attribute message_id # The ID of the message that was created by this run step. 
diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index e5251302..2a263bdc 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -42,7 +42,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } @@ -57,7 +57,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order, nil] optional :order, enum: -> { OpenAI::Beta::Threads::Runs::StepListParams::Order } # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) @@ -70,11 +70,11 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # @param include [Array] A list of additional fields to include in the response. Currently the only suppo # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index f6238fd8..349177b1 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -29,7 +29,7 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) # for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Beta::Threads::Runs::RunStepInclude] } @@ -41,7 +41,7 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # # @param run_id [String] # - # @param include [Array] A list of additional fields to include in the response. Currently the only suppo + # @param include [Array] A list of additional fields to include in the response. 
Currently the only suppo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index 9452c475..eb99ad15 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -19,17 +19,7 @@ module ToolCall variant :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCall } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Beta::Threads::Runs::FunctionToolCall)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Beta::Threads::Runs::FunctionToolCall - ) - end - end + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index 10ad55e8..14cdb2c1 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -19,17 +19,7 @@ module ToolCallDelta variant :function, -> { OpenAI::Beta::Threads::Runs::FunctionToolCallDelta } # @!method self.variants - # @return [Array(OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Beta::Threads::Runs::FunctionToolCallDelta)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Beta::Threads::Runs::FunctionToolCallDelta - ) - end - end + # @return 
[Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index 668b9ec8..c6900dcc 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -17,17 +17,17 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCallDelta] } # @!method initialize(tool_calls: nil, type: :tool_calls) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::ToolCallDeltaObject} for more details. + # {OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject} for more details. # # Details of the tool call. # - # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit # # @param type [Symbol, :tool_calls] Always `tool_calls`. end diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index 368ce9b5..9adca268 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -11,7 +11,7 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. 
# - # @return [Array] + # @return [Array] required :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Runs::ToolCall] @@ -25,11 +25,11 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(tool_calls:, type: :tool_calls) # Some parameter documentations has been truncated, see - # {OpenAI::Beta::Threads::Runs::ToolCallsStepDetails} for more details. + # {OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails} for more details. # # Details of the tool call. # - # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit + # @param tool_calls [Array] An array of tool calls the run step was involved in. These can be associated wit # # @param type [Symbol, :tool_calls] Always `tool_calls`. end diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index 180b3abb..a0247200 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -7,7 +7,7 @@ module Threads class Text < OpenAI::Internal::Type::BaseModel # @!attribute annotations # - # @return [Array] + # @return [Array] required :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::Annotation] @@ -20,7 +20,7 @@ class Text < OpenAI::Internal::Type::BaseModel required :value, String # @!method initialize(annotations:, value:) - # @param annotations [Array] + # @param annotations [Array] # # @param value [String] The data that makes up the text. 
end diff --git a/lib/openai/models/beta/threads/text_content_block.rb b/lib/openai/models/beta/threads/text_content_block.rb index 73b0bd54..c1c391fa 100644 --- a/lib/openai/models/beta/threads/text_content_block.rb +++ b/lib/openai/models/beta/threads/text_content_block.rb @@ -7,7 +7,7 @@ module Threads class TextContentBlock < OpenAI::Internal::Type::BaseModel # @!attribute text # - # @return [OpenAI::Beta::Threads::Text] + # @return [OpenAI::Models::Beta::Threads::Text] required :text, -> { OpenAI::Beta::Threads::Text } # @!attribute type @@ -19,7 +19,7 @@ class TextContentBlock < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :text) # The text content that is part of a message. # - # @param text [OpenAI::Beta::Threads::Text] + # @param text [OpenAI::Models::Beta::Threads::Text] # # @param type [Symbol, :text] Always `text`. end diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index 63ad3975..ef33693b 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -7,7 +7,7 @@ module Threads class TextDelta < OpenAI::Internal::Type::BaseModel # @!attribute annotations # - # @return [Array, nil] + # @return [Array, nil] optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::Threads::AnnotationDelta] } @@ -18,7 +18,7 @@ class TextDelta < OpenAI::Internal::Type::BaseModel optional :value, String # @!method initialize(annotations: nil, value: nil) - # @param annotations [Array] + # @param annotations [Array] # # @param value [String] The data that makes up the text. 
end diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index 126aefbd..f55a50f7 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -19,7 +19,7 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # @!attribute text # - # @return [OpenAI::Beta::Threads::TextDelta, nil] + # @return [OpenAI::Models::Beta::Threads::TextDelta, nil] optional :text, -> { OpenAI::Beta::Threads::TextDelta } # @!method initialize(index:, text: nil, type: :text) @@ -27,7 +27,7 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # # @param index [Integer] The index of the content part in the message. # - # @param text [OpenAI::Beta::Threads::TextDelta] + # @param text [OpenAI::Models::Beta::Threads::TextDelta] # # @param type [Symbol, :text] Always `text`. end diff --git a/lib/openai/models/beta/truncation_object.rb b/lib/openai/models/beta/truncation_object.rb deleted file mode 100644 index c406a0b7..00000000 --- a/lib/openai/models/beta/truncation_object.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Beta - class TruncationObject < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @return [Symbol, OpenAI::Beta::TruncationObject::Type] - required :type, enum: -> { OpenAI::Beta::TruncationObject::Type } - - # @!attribute last_messages - # The number of most recent messages from the thread when constructing the context - # for the run. 
- # - # @return [Integer, nil] - optional :last_messages, Integer, nil?: true - - # @!method initialize(type:, last_messages: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Beta::TruncationObject} for more details. - # - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - # - # @param type [Symbol, OpenAI::Beta::TruncationObject::Type] The truncation strategy to use for the thread. The default is `auto`. If set to - # - # @param last_messages [Integer, nil] The number of most recent messages from the thread when constructing the context - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - # - # @see OpenAI::Beta::TruncationObject#type - module Type - extend OpenAI::Internal::Type::Enum - - AUTO = :auto - LAST_MESSAGES = :last_messages - - # @!method self.values - # @return [Array] - end - end - end - end -end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 8fb216ee..6f5b922c 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -17,7 +17,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # A list of chat completion choices. Can be more than one if `n` is greater # than 1. # - # @return [Array] + # @return [Array] required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletion::Choice] } # @!attribute created @@ -57,7 +57,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. 
# - # @return [Symbol, OpenAI::Chat::ChatCompletion::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletion::ServiceTier }, nil?: true # @!attribute system_fingerprint @@ -72,29 +72,29 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @!attribute usage # Usage statistics for the completion request. # - # @return [OpenAI::CompletionUsage, nil] + # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletion} for more details. + # {OpenAI::Models::Chat::ChatCompletion} for more details. # # Represents a chat completion response returned by model, based on the provided # input. # # @param id [String] A unique identifier for the chat completion. # - # @param choices [Array] A list of chat completion choices. Can be more than one if `n` is greater than 1 + # @param choices [Array] A list of chat completion choices. Can be more than one if `n` is greater than 1 # # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. # # @param model [String] The model used for the chat completion. # - # @param service_tier [Symbol, OpenAI::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. # - # @param usage [OpenAI::CompletionUsage] Usage statistics for the completion request. 
+ # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. # # @param object [Symbol, :"chat.completion"] The object type, which is always `chat.completion`. @@ -107,7 +107,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @return [Symbol, OpenAI::Chat::ChatCompletion::Choice::FinishReason] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] required :finish_reason, enum: -> { OpenAI::Chat::ChatCompletion::Choice::FinishReason } # @!attribute index @@ -119,26 +119,26 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # Log probability information for the choice. # - # @return [OpenAI::Chat::ChatCompletion::Choice::Logprobs, nil] + # @return [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] required :logprobs, -> { OpenAI::Chat::ChatCompletion::Choice::Logprobs }, nil?: true # @!attribute message # A chat completion message generated by the model. # - # @return [OpenAI::Chat::ChatCompletionMessage] + # @return [OpenAI::Models::Chat::ChatCompletionMessage] required :message, -> { OpenAI::Chat::ChatCompletionMessage } # @!method initialize(finish_reason:, index:, logprobs:, message:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletion::Choice} for more details. + # {OpenAI::Models::Chat::ChatCompletion::Choice} for more details. # - # @param finish_reason [Symbol, OpenAI::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] The index of the choice in the list of choices. 
# - # @param logprobs [OpenAI::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice. + # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] Log probability information for the choice. # - # @param message [OpenAI::Chat::ChatCompletionMessage] A chat completion message generated by the model. + # @param message [OpenAI::Models::Chat::ChatCompletionMessage] A chat completion message generated by the model. # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -147,7 +147,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @see OpenAI::Chat::ChatCompletion::Choice#finish_reason + # @see OpenAI::Models::Chat::ChatCompletion::Choice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -161,12 +161,12 @@ module FinishReason # @return [Array] end - # @see OpenAI::Chat::ChatCompletion::Choice#logprobs + # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -174,7 +174,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute refusal # A list of message refusal tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -182,9 +182,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:) # Log probability information for the choice. 
# - # @param content [Array, nil] A list of message content tokens with log probability information. + # @param content [Array, nil] A list of message content tokens with log probability information. # - # @param refusal [Array, nil] A list of message refusal tokens with log probability information. + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end @@ -206,7 +206,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @see OpenAI::Chat::ChatCompletion#service_tier + # @see OpenAI::Models::Chat::ChatCompletion#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index b49a8303..338351a8 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -14,14 +14,14 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] optional :audio, -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio }, nil?: true # @!attribute content # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. # - # @return [String, Array, nil] + # @return [String, Array, nil] optional :content, union: -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Content @@ -34,7 +34,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. 
The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] optional :function_call, -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall }, nil?: true @@ -55,7 +55,7 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] @@ -63,25 +63,25 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAssistantMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam} for more details. # # Messages sent by the model in response to user messages. # - # @param audio [OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio, nil] Data about a previous audio response from the model. + # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] Data about a previous audio response from the model. # - # @param content [String, Array, nil] The contents of the assistant message. Required unless `tool_calls` or `function + # @param content [String, Array, nil] The contents of the assistant message. Required unless `tool_calls` or `function # - # @param function_call [OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] Deprecated and replaced by `tool_calls`. 
The name and arguments of a function th + # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # # @param name [String] An optional name for the participant. Provides the model information to differen # # @param refusal [String, nil] The refusal message by the assistant. # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. + # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the messages author, in this case `assistant`. - # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#audio + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio class Audio < OpenAI::Internal::Type::BaseModel # @!attribute id # Unique identifier for a previous audio response from the model. @@ -91,7 +91,8 @@ class Audio < OpenAI::Internal::Type::BaseModel # @!method initialize(id:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio} for more details. + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio} for more + # details. # # Data about a previous audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). @@ -102,7 +103,7 @@ class Audio < OpenAI::Internal::Type::BaseModel # The contents of the assistant message. Required unless `tool_calls` or # `function_call` is specified. # - # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -110,7 +111,7 @@ module Content variant String # An array of content parts with a defined type. Can be one or more of type `text`, or exactly one of type `refusal`. 
- variant -> { OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } + variant -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPartArray } # Learn about # [text inputs](https://platform.openai.com/docs/guides/text-generation). @@ -125,32 +126,11 @@ module ArrayOfContentPart variant :refusal, -> { OpenAI::Chat::ChatCompletionContentPartRefusal } # @!method self.variants - # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) - end - end + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] end # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - )] - ) - end - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ArrayOfContentPartArray = @@ -161,7 +141,7 @@ module ArrayOfContentPart # @deprecated # - # @see OpenAI::Chat::ChatCompletionAssistantMessageParam#function_call + # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -180,8 +160,8 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall} for more - # details. + # {OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall} for + # more details. 
# # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index 861309b5..fae460ec 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -32,7 +32,7 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, data:, expires_at:, transcript:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAudio} for more details. + # {OpenAI::Models::Chat::ChatCompletionAudio} for more details. # # If the audio output modality is requested, this object contains data about the # audio response from the model. diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 3ab3a1d0..46f1463f 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -8,32 +8,32 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. # - # @return [Symbol, OpenAI::Chat::ChatCompletionAudioParam::Format] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] required :format_, enum: -> { OpenAI::Chat::ChatCompletionAudioParam::Format }, api_name: :format # @!attribute voice # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. 
# - # @return [String, Symbol, OpenAI::Chat::ChatCompletionAudioParam::Voice] + # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] required :voice, union: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice } # @!method initialize(format_:, voice:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionAudioParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionAudioParam} for more details. # # Parameters for audio output. Required when audio output is requested with # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @param format_ [Symbol, OpenAI::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, # - # @param voice [String, Symbol, OpenAI::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are + # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] The voice the model uses to respond. Supported voices are # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. # - # @see OpenAI::Chat::ChatCompletionAudioParam#format_ + # @see OpenAI::Models::Chat::ChatCompletionAudioParam#format_ module Format extend OpenAI::Internal::Type::Enum @@ -51,33 +51,33 @@ module Format # The voice the model uses to respond. Supported voices are `alloy`, `ash`, # `ballad`, `coral`, `echo`, `fable`, `nova`, `onyx`, `sage`, and `shimmer`. 
# - # @see OpenAI::Chat::ChatCompletionAudioParam#voice + # @see OpenAI::Models::Chat::ChatCompletionAudioParam#voice module Voice extend OpenAI::Internal::Type::Union variant String - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ALLOY } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ALLOY } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ASH } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ASH } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::BALLAD } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::BALLAD } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::CORAL } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::CORAL } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ECHO } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ECHO } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::FABLE } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::FABLE } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::ONYX } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::ONYX } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::NOVA } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::NOVA } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::SAGE } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SAGE } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::SHIMMER } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::SHIMMER } - variant const: -> { OpenAI::Chat::ChatCompletionAudioParam::Voice::VERSE } + variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE } # @!method self.variants # @return 
[Array(String, Symbol)] diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 8f94cd1e..9dfe771e 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -15,7 +15,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # greater than 1. Can also be empty for the last chunk if you set # `stream_options: {"include_usage": true}`. # - # @return [Array] + # @return [Array] required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice] } # @!attribute created @@ -56,7 +56,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Chat::ChatCompletionChunk::ServiceTier }, nil?: true # @!attribute system_fingerprint @@ -76,12 +76,12 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # **NOTE:** If the stream is interrupted or cancelled, you may not receive the # final usage chunk which contains the total token usage for the request. # - # @return [OpenAI::CompletionUsage, nil] + # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::CompletionUsage }, nil?: true # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion.chunk") # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk} for more details. + # {OpenAI::Models::Chat::ChatCompletionChunk} for more details. # # Represents a streamed chunk of a chat completion response returned by the model, # based on the provided input. 
@@ -89,17 +89,17 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # # @param id [String] A unique identifier for the chat completion. Each chunk has the same ID. # - # @param choices [Array] A list of chat completion choices. Can contain more than one elements if `n` is + # @param choices [Array] A list of chat completion choices. Can contain more than one elements if `n` is # # @param created [Integer] The Unix timestamp (in seconds) of when the chat completion was created. Each ch # # @param model [String] The model to generate the completion. # - # @param service_tier [Symbol, OpenAI::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. # - # @param usage [OpenAI::CompletionUsage, nil] An optional field that will only be present when you set + # @param usage [OpenAI::Models::CompletionUsage, nil] An optional field that will only be present when you set # # @param object [Symbol, :"chat.completion.chunk"] The object type, which is always `chat.completion.chunk`. @@ -107,7 +107,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute delta # A chat completion delta generated by streamed model responses. # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] required :delta, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta } # @!attribute finish_reason @@ -118,7 +118,7 @@ class Choice < OpenAI::Internal::Type::BaseModel # model called a tool, or `function_call` (deprecated) if the model called a # function. 
# - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] required :finish_reason, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason @@ -134,22 +134,22 @@ class Choice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # Log probability information for the choice. # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs, nil] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] optional :logprobs, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs }, nil?: true # @!method initialize(delta:, finish_reason:, index:, logprobs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice} for more details. + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice} for more details. # - # @param delta [OpenAI::Chat::ChatCompletionChunk::Choice::Delta] A chat completion delta generated by streamed model responses. + # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] A chat completion delta generated by streamed model responses. # - # @param finish_reason [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::FinishReason, nil] The reason the model stopped generating tokens. This will be `stop` if the model + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] The index of the choice in the list of choices. # - # @param logprobs [OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs, nil] Log probability information for the choice. + # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] Log probability information for the choice. 
- # @see OpenAI::Chat::ChatCompletionChunk::Choice#delta + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta class Delta < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the chunk message. @@ -163,7 +163,7 @@ class Delta < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] optional :function_call, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall } # @!attribute refusal @@ -175,12 +175,12 @@ class Delta < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the author of this message. # - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] optional :role, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role } # @!attribute tool_calls # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] @@ -188,23 +188,23 @@ class Delta < OpenAI::Internal::Type::BaseModel # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta} for more details. + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta} for more details. # # A chat completion delta generated by streamed model responses. # # @param content [String, nil] The contents of the chunk message. # - # @param function_call [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] Deprecated and replaced by `tool_calls`. 
The name and arguments of a function th + # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # # @param refusal [String, nil] The refusal message generated by the model. # - # @param role [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::Role] The role of the author of this message. + # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] The role of the author of this message. # - # @param tool_calls [Array] + # @param tool_calls [Array] # @deprecated # - # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta#function_call + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -223,8 +223,8 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall} for more - # details. + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall} for + # more details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. @@ -236,7 +236,7 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # The role of the author of this message. 
# - # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta#role + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#role module Role extend OpenAI::Internal::Type::Enum @@ -264,13 +264,13 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] + # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] optional :function, -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function } # @!attribute type # The type of the tool. Currently, only `function` is supported. # - # @return [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] optional :type, enum: -> { OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type } # @!method initialize(index:, id: nil, function: nil, type: nil) @@ -278,11 +278,11 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The ID of the tool call. # - # @param function [OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] + # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] # - # @param type [Symbol, OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] The type of the tool. Currently, only `function` is supported. + # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] The type of the tool. Currently, only `function` is supported. 
- # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -301,8 +301,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments: nil, name: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function} for more - # details. + # {OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function} + # for more details. # # @param arguments [String] The arguments to call the function with, as generated by the model in JSON forma # @@ -311,7 +311,7 @@ class Function < OpenAI::Internal::Type::BaseModel # The type of the tool. Currently, only `function` is supported. # - # @see OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#type module Type extend OpenAI::Internal::Type::Enum @@ -330,7 +330,7 @@ module Type # model called a tool, or `function_call` (deprecated) if the model called a # function. # - # @see OpenAI::Chat::ChatCompletionChunk::Choice#finish_reason + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -344,12 +344,12 @@ module FinishReason # @return [Array] end - # @see OpenAI::Chat::ChatCompletionChunk::Choice#logprobs + # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute content # A list of message content tokens with log probability information. 
# - # @return [Array, nil] + # @return [Array, nil] required :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -357,7 +357,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute refusal # A list of message refusal tokens with log probability information. # - # @return [Array, nil] + # @return [Array, nil] required :refusal, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob] }, nil?: true @@ -365,9 +365,9 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:) # Log probability information for the choice. # - # @param content [Array, nil] A list of message content tokens with log probability information. + # @param content [Array, nil] A list of message content tokens with log probability information. # - # @param refusal [Array, nil] A list of message refusal tokens with log probability information. + # @param refusal [Array, nil] A list of message refusal tokens with log probability information. end end @@ -389,7 +389,7 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. 
# - # @see OpenAI::Chat::ChatCompletionChunk#service_tier + # @see OpenAI::Models::Chat::ChatCompletionChunk#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index a79c7dbf..ba2d6918 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -25,7 +25,7 @@ module ChatCompletionContentPart class File < OpenAI::Internal::Type::BaseModel # @!attribute file # - # @return [OpenAI::Chat::ChatCompletionContentPart::File::File] + # @return [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] required :file, -> { OpenAI::Chat::ChatCompletionContentPart::File::File } # @!attribute type @@ -38,11 +38,11 @@ class File < OpenAI::Internal::Type::BaseModel # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text # generation. # - # @param file [OpenAI::Chat::ChatCompletionContentPart::File::File] + # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] # # @param type [Symbol, :file] The type of the content part. Always `file`. - # @see OpenAI::Chat::ChatCompletionContentPart::File#file + # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel # @!attribute file_data # The base64 encoded file data, used when passing the file to the model as a @@ -65,7 +65,7 @@ class File < OpenAI::Internal::Type::BaseModel # @!method initialize(file_data: nil, file_id: nil, filename: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionContentPart::File::File} for more details. + # {OpenAI::Models::Chat::ChatCompletionContentPart::File::File} for more details. 
# # @param file_data [String] The base64 encoded file data, used when passing the file to the model # @@ -76,18 +76,7 @@ class File < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Chat::ChatCompletionContentPartText, OpenAI::Chat::ChatCompletionContentPartImage, OpenAI::Chat::ChatCompletionContentPartInputAudio, OpenAI::Chat::ChatCompletionContentPart::File)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - end - end + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)] end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 9288c5d4..f5971945 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @!attribute image_url # - # @return [OpenAI::Chat::ChatCompletionContentPartImage::ImageURL] + # @return [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] required :image_url, -> { OpenAI::Chat::ChatCompletionContentPartImage::ImageURL } # @!attribute type @@ -18,11 +18,11 @@ class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). 
# - # @param image_url [OpenAI::Chat::ChatCompletionContentPartImage::ImageURL] + # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] # # @param type [Symbol, :image_url] The type of the content part. - # @see OpenAI::Chat::ChatCompletionContentPartImage#image_url + # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. @@ -34,21 +34,22 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # - # @return [Symbol, OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail } # @!method initialize(url:, detail: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionContentPartImage::ImageURL} for more details. + # {OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL} for more + # details. # # @param url [String] Either a URL of the image or the base64 encoded image data. # - # @param detail [Symbol, OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::Detail] Specifies the detail level of the image. Learn more in the [Vision guide](https: + # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] Specifies the detail level of the image. Learn more in the [Vision guide](https: # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). 
# - # @see OpenAI::Chat::ChatCompletionContentPartImage::ImageURL#detail + # @see OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 86a9a7d5..d8f86fb2 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute input_audio # - # @return [OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio] + # @return [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] required :input_audio, -> { OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio } # @!attribute type @@ -18,11 +18,11 @@ class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(input_audio:, type: :input_audio) # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). # - # @param input_audio [OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio] + # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] # # @param type [Symbol, :input_audio] The type of the content part. Always `input_audio`. - # @see OpenAI::Chat::ChatCompletionContentPartInputAudio#input_audio + # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio class InputAudio < OpenAI::Internal::Type::BaseModel # @!attribute data # Base64 encoded audio data. @@ -33,23 +33,23 @@ class InputAudio < OpenAI::Internal::Type::BaseModel # @!attribute format_ # The format of the encoded audio data. Currently supports "wav" and "mp3". 
# - # @return [Symbol, OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] required :format_, enum: -> { OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format }, api_name: :format # @!method initialize(data:, format_:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio} for more + # {OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio} for more # details. # # @param data [String] Base64 encoded audio data. # - # @param format_ [Symbol, OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] The format of the encoded audio data. Currently supports "wav" and "mp3". + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] The format of the encoded audio data. Currently supports "wav" and "mp3". # The format of the encoded audio data. Currently supports "wav" and "mp3". # - # @see OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ + # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio#format_ module Format extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 492403d8..828698cb 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the developer message. 
# - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content } # @!attribute role @@ -25,13 +25,13 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :developer) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionDeveloperMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam} for more details. # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages # replace the previous `system` messages. # - # @param content [String, Array] The contents of the developer message. + # @param content [String, Array] The contents of the developer message. # # @param name [String] An optional name for the participant. Provides the model information to differen # @@ -39,7 +39,7 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the developer message. # - # @see OpenAI::Chat::ChatCompletionDeveloperMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -47,16 +47,10 @@ module Content variant String # An array of content parts with a defined type. For developer messages, only type `text` is supported. 
- variant -> { - OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index bb6f6924..444bb7f7 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -32,7 +32,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # Annotations for the message, when applicable, as when using the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # - # @return [Array, nil] + # @return [Array, nil] optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessage::Annotation] } @@ -41,7 +41,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # audio response from the model. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Chat::ChatCompletionAudio, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAudio, nil] optional :audio, -> { OpenAI::Chat::ChatCompletionAudio }, nil?: true # @!attribute function_call @@ -50,13 +50,13 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. 
# - # @return [OpenAI::Chat::ChatCompletionMessage::FunctionCall, nil] + # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, nil] optional :function_call, -> { OpenAI::Chat::ChatCompletionMessage::FunctionCall } # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # - # @return [Array, nil] + # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionMessageToolCall] @@ -64,7 +64,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionMessage} for more details. + # {OpenAI::Models::Chat::ChatCompletionMessage} for more details. # # A chat completion message generated by the model. # @@ -72,13 +72,13 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # # @param refusal [String, nil] The refusal message generated by the model. # - # @param annotations [Array] Annotations for the message, when applicable, as when using the + # @param annotations [Array] Annotations for the message, when applicable, as when using the # - # @param audio [OpenAI::Chat::ChatCompletionAudio, nil] If the audio output modality is requested, this object contains data + # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] If the audio output modality is requested, this object contains data # - # @param function_call [OpenAI::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th + # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] Deprecated and replaced by `tool_calls`. The name and arguments of a function th # - # @param tool_calls [Array] The tool calls generated by the model, such as function calls. 
+ # @param tool_calls [Array] The tool calls generated by the model, such as function calls. # # @param role [Symbol, :assistant] The role of the author of this message. @@ -92,17 +92,17 @@ class Annotation < OpenAI::Internal::Type::BaseModel # @!attribute url_citation # A URL citation when using web search. # - # @return [OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation] + # @return [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] required :url_citation, -> { OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation } # @!method initialize(url_citation:, type: :url_citation) # A URL citation when using web search. # - # @param url_citation [OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation] A URL citation when using web search. + # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] A URL citation when using web search. # # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. - # @see OpenAI::Chat::ChatCompletionMessage::Annotation#url_citation + # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation class URLCitation < OpenAI::Internal::Type::BaseModel # @!attribute end_index # The index of the last character of the URL citation in the message. @@ -143,7 +143,7 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @deprecated # - # @see OpenAI::Chat::ChatCompletionMessage#function_call + # @see OpenAI::Models::Chat::ChatCompletionMessage#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -162,7 +162,7 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionMessage::FunctionCall} for more details. 
+ # {OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall} for more details. # # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index b3ba243b..b25933e3 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -33,20 +33,7 @@ module ChatCompletionMessageParam variant :function, -> { OpenAI::Chat::ChatCompletionFunctionMessageParam } # @!method self.variants - # @return [Array(OpenAI::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Chat::ChatCompletionSystemMessageParam, OpenAI::Chat::ChatCompletionUserMessageParam, OpenAI::Chat::ChatCompletionAssistantMessageParam, OpenAI::Chat::ChatCompletionToolMessageParam, OpenAI::Chat::ChatCompletionFunctionMessageParam)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionDeveloperMessageParam, - OpenAI::Chat::ChatCompletionSystemMessageParam, - OpenAI::Chat::ChatCompletionUserMessageParam, - OpenAI::Chat::ChatCompletionAssistantMessageParam, - OpenAI::Chat::ChatCompletionToolMessageParam, - OpenAI::Chat::ChatCompletionFunctionMessageParam - ) - end - end + # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] end end diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index f64981ce..eab6d4a4 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ 
b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -13,7 +13,7 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @!attribute function # The function that the model called. # - # @return [OpenAI::Chat::ChatCompletionMessageToolCall::Function] + # @return [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] required :function, -> { OpenAI::Chat::ChatCompletionMessageToolCall::Function } # @!attribute type @@ -25,11 +25,11 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, function:, type: :function) # @param id [String] The ID of the tool call. # - # @param function [OpenAI::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. + # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] The function that the model called. # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. - # @see OpenAI::Chat::ChatCompletionMessageToolCall#function + # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON @@ -54,7 +54,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(arguments:, name:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionMessageToolCall::Function} for more details. + # {OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function} for more + # details. # # The function that the model called. 
# diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index afab8ee0..493e6c0c 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::Chat::ChatCompletionNamedToolChoice::Function] + # @return [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] required :function, -> { OpenAI::Chat::ChatCompletionNamedToolChoice::Function } # @!attribute type @@ -19,11 +19,11 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # Specifies a tool the model should use. Use to force the model to call a specific # function. # - # @param function [OpenAI::Chat::ChatCompletionNamedToolChoice::Function] + # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. - # @see OpenAI::Chat::ChatCompletionNamedToolChoice#function + # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the function to call. diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index 0b0ba1f0..52235ae7 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -9,7 +9,7 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # generated tokens would match this content, the entire model response can be # returned much more quickly. 
# - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionPredictionContent::Content } # @!attribute type @@ -21,12 +21,12 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, type: :content) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionPredictionContent} for more details. + # {OpenAI::Models::Chat::ChatCompletionPredictionContent} for more details. # # Static predicted output content, such as the content of a text file that is # being regenerated. # - # @param content [String, Array] The content that should be matched when generating a model response. + # @param content [String, Array] The content that should be matched when generating a model response. # # @param type [Symbol, :content] The type of the predicted content you want to provide. This type is @@ -34,7 +34,7 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # generated tokens would match this content, the entire model response can be # returned much more quickly. # - # @see OpenAI::Chat::ChatCompletionPredictionContent#content + # @see OpenAI::Models::Chat::ChatCompletionPredictionContent#content module Content extend OpenAI::Internal::Type::Union @@ -43,16 +43,10 @@ module Content variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text inputs. 
- variant -> { - OpenAI::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index ffbaa513..acb72bd9 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -18,7 +18,7 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(include_usage: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionStreamOptions} for more details. + # {OpenAI::Models::Chat::ChatCompletionStreamOptions} for more details. # # Options for streaming response. Only set this when you set `stream: true`. # diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 746905d7..2d391de3 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the system message. 
# - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionSystemMessageParam::Content } # @!attribute role @@ -25,13 +25,13 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :system) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionSystemMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionSystemMessageParam} for more details. # # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages # for this purpose instead. # - # @param content [String, Array] The contents of the system message. + # @param content [String, Array] The contents of the system message. # # @param name [String] An optional name for the participant. Provides the model information to differen # @@ -39,7 +39,7 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the system message. # - # @see OpenAI::Chat::ChatCompletionSystemMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionSystemMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -47,16 +47,10 @@ module Content variant String # An array of content parts with a defined type. For system messages, only type `text` is supported. 
- variant -> { - OpenAI::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index c45fcee7..7a09b7c2 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -32,13 +32,13 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # position. In rare cases, there may be fewer than the number of requested # `top_logprobs` returned. # - # @return [Array] + # @return [Array] required :top_logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] } # @!method initialize(token:, bytes:, logprob:, top_logprobs:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionTokenLogprob} for more details. + # {OpenAI::Models::Chat::ChatCompletionTokenLogprob} for more details. # # @param token [String] The token. 
# @@ -46,7 +46,7 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel # # @param logprob [Float] The log probability of this token, if it is within the top 20 most likely tokens # - # @param top_logprobs [Array] List of the most likely tokens and their log probability, at this token position + # @param top_logprobs [Array] List of the most likely tokens and their log probability, at this token position class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -74,7 +74,7 @@ class TopLogprob < OpenAI::Internal::Type::BaseModel # @!method initialize(token:, bytes:, logprob:) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob} for more details. + # {OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob} for more details. # # @param token [String] The token. # diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index 21a86040..d6ee8c94 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -6,7 +6,7 @@ module Chat class ChatCompletionTool < OpenAI::Internal::Type::BaseModel # @!attribute function # - # @return [OpenAI::FunctionDefinition] + # @return [OpenAI::Models::FunctionDefinition] required :function, -> { OpenAI::FunctionDefinition } # @!attribute type @@ -16,7 +16,7 @@ class ChatCompletionTool < OpenAI::Internal::Type::BaseModel required :type, const: :function # @!method initialize(function:, type: :function) - # @param function [OpenAI::FunctionDefinition] + # @param function [OpenAI::Models::FunctionDefinition] # # @param type [Symbol, :function] The type of the tool. Currently, only `function` is supported. 
end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index 506d3899..73874236 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -36,16 +36,7 @@ module Auto end # @!method self.variants - # @return [Array(Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Chat::ChatCompletionNamedToolChoice - ) - end - end + # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 6ab285b1..4685c606 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the tool message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionToolMessageParam::Content } # @!attribute role @@ -23,7 +23,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel required :tool_call_id, String # @!method initialize(content:, tool_call_id:, role: :tool) - # @param content [String, Array] The contents of the tool message. + # @param content [String, Array] The contents of the tool message. # # @param tool_call_id [String] Tool call that this message is responding to. # @@ -31,7 +31,7 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the tool message. 
# - # @see OpenAI::Chat::ChatCompletionToolMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionToolMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -39,16 +39,10 @@ module Content variant String # An array of content parts with a defined type. For tool messages, only type `text` is supported. - variant -> { - OpenAI::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray - } + variant -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) } - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartTextArray = diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 33a1f532..7335c7f0 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -7,7 +7,7 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!attribute content # The contents of the user message. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Chat::ChatCompletionUserMessageParam::Content } # @!attribute role @@ -25,12 +25,12 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @!method initialize(content:, name: nil, role: :user) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::ChatCompletionUserMessageParam} for more details. + # {OpenAI::Models::Chat::ChatCompletionUserMessageParam} for more details. # # Messages sent by an end user, containing prompts or additional context # information. # - # @param content [String, Array] The contents of the user message. 
+ # @param content [String, Array] The contents of the user message. # # @param name [String] An optional name for the participant. Provides the model information to differen # @@ -38,7 +38,7 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # The contents of the user message. # - # @see OpenAI::Chat::ChatCompletionUserMessageParam#content + # @see OpenAI::Models::Chat::ChatCompletionUserMessageParam#content module Content extend OpenAI::Internal::Type::Union @@ -46,26 +46,10 @@ module Content variant String # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text, image, or audio inputs. - variant -> { OpenAI::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } + variant -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] - ) - end - end + # @return [Array(String, Array)] # @type [OpenAI::Internal::Type::Converter] ChatCompletionContentPartArray = diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index 69ccac80..458bda0c 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -18,7 +18,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [images](https://platform.openai.com/docs/guides/vision), and # [audio](https://platform.openai.com/docs/guides/audio). 
# - # @return [Array] + # @return [Array] required :messages, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Chat::ChatCompletionMessageParam] } @@ -29,7 +29,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::ChatModel] + # @return [String, Symbol, OpenAI::Models::ChatModel] required :model, union: -> { OpenAI::Chat::CompletionCreateParams::Model } # @!attribute audio @@ -37,7 +37,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `modalities: ["audio"]`. # [Learn more](https://platform.openai.com/docs/guides/audio). # - # @return [OpenAI::Chat::ChatCompletionAudioParam, nil] + # @return [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] optional :audio, -> { OpenAI::Chat::ChatCompletionAudioParam }, nil?: true # @!attribute frequency_penalty @@ -66,7 +66,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `none` is the default when no functions are present. `auto` is the default if # functions are present. # - # @return [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, nil] optional :function_call, union: -> { OpenAI::Chat::CompletionCreateParams::FunctionCall } # @!attribute functions @@ -76,7 +76,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # A list of functions the model may generate JSON inputs for. 
# - # @return [Array, nil] + # @return [Array, nil] optional :functions, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::CompletionCreateParams::Function] } @@ -146,7 +146,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # `["text", "audio"]` # - # @return [Array, nil] + # @return [Array, nil] optional :modalities, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Chat::CompletionCreateParams::Modality] }, nil?: true @@ -171,7 +171,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # Static predicted output content, such as the content of a text file that is # being regenerated. # - # @return [OpenAI::Chat::ChatCompletionPredictionContent, nil] + # @return [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] optional :prediction, -> { OpenAI::Chat::ChatCompletionPredictionContent }, nil?: true # @!attribute presence_penalty @@ -190,7 +190,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute response_format @@ -205,7 +205,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. 
# - # @return [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject, nil] + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::StructuredOutput::JsonSchemaConverter, OpenAI::Models::ResponseFormatJSONObject, nil] optional :response_format, union: -> { OpenAI::Chat::CompletionCreateParams::ResponseFormat } # @!attribute seed @@ -237,7 +237,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Chat::CompletionCreateParams::ServiceTier }, nil?: true # @!attribute stop @@ -260,7 +260,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. # - # @return [OpenAI::Chat::ChatCompletionStreamOptions, nil] + # @return [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] optional :stream_options, -> { OpenAI::Chat::ChatCompletionStreamOptions }, nil?: true # @!attribute temperature @@ -283,7 +283,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # `none` is the default when no tools are present. `auto` is the default if tools # are present. # - # @return [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice, nil] + # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Chat::ChatCompletionToolChoiceOption } # @!attribute tools @@ -291,8 +291,13 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # tool. 
Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. # - # @return [Array, nil] - optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Chat::ChatCompletionTool] } + # @return [Array, nil] + optional :tools, + -> { + OpenAI::Internal::Type::ArrayOf[union: OpenAI::UnionOf[ + OpenAI::Chat::ChatCompletionTool, OpenAI::StructuredOutput::JsonSchemaConverter + ]] + } # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to @@ -313,8 +318,8 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :top_p, Float, nil?: true # @!attribute user - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] @@ -325,24 +330,24 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
# - # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions, nil] + # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions } # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Chat::CompletionCreateParams} for more details. # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. 
+ # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. + # @param functions [Array] Deprecated in favor of `tools`. # # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # @@ -354,43 +359,43 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. 
+ # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::StructuredOutput::JsonSchemaConverter, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. 
Currently, only functions are supported as a # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # - # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -411,11 +416,7 @@ module Model variant enum: -> { OpenAI::ChatModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ChatModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end # @deprecated @@ -457,16 +458,7 @@ module FunctionCallMode end # @!method self.variants - # @return [Array(Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol, - OpenAI::Chat::ChatCompletionFunctionCallOption - ) - end - end + # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] end # @deprecated @@ -499,7 +491,7 @@ class Function < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, parameters: nil) # Some parameter documentations has been truncated, see - # 
{OpenAI::Chat::CompletionCreateParams::Function} for more details. + # {OpenAI::Models::Chat::CompletionCreateParams::Function} for more details. # # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc # @@ -538,6 +530,12 @@ module ResponseFormat # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). variant -> { OpenAI::ResponseFormatJSONSchema } + # An {OpenAI::BaseModel} can be provided and implicitly converted into {OpenAI::ResponseFormatJSONSchema}. + # See examples for more details. + # + # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + variant -> { OpenAI::StructuredOutput::JsonSchemaConverter } + # JSON object response format. An older method of generating JSON responses. # Using `json_schema` is recommended for models that support it. Note that the # model will not generate JSON without a system or user message instructing it @@ -545,17 +543,7 @@ module ResponseFormat variant -> { OpenAI::ResponseFormatJSONObject } # @!method self.variants - # @return [Array(OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONSchema, - OpenAI::ResponseFormatJSONObject - ) - end - end + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] end # Specifies the latency tier to use for processing the request. 
This parameter is @@ -595,15 +583,11 @@ module Stop variant String - variant -> { OpenAI::Chat::CompletionCreateParams::Stop::StringArray } + variant -> { OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray } # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.nilable(T.any(String, T::Array[String])) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -613,34 +597,35 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @return [Symbol, OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize, nil] optional :search_context_size, enum: -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize } # @!attribute user_location # Approximate location parameters for the search. # - # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] + # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] optional :user_location, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation }, nil?: true # @!method initialize(search_context_size: nil, user_location: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions} for more details. + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions} for more + # details. # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
# - # @param search_context_size [Symbol, OpenAI::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] High level guidance for the amount of context window space to use for the + # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] High level guidance for the amount of context window space to use for the # - # @param user_location [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] Approximate location parameters for the search. + # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] Approximate location parameters for the search. # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions#search_context_size + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#search_context_size module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -652,12 +637,12 @@ module SearchContextSize # @return [Array] end - # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions#user_location + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#user_location class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute approximate # Approximate location parameters for the search. 
# - # @return [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] + # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] required :approximate, -> { OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate } @@ -669,16 +654,16 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!method initialize(approximate:, type: :approximate) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation} for more - # details. + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation} + # for more details. # # Approximate location parameters for the search. # - # @param approximate [OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] Approximate location parameters for the search. + # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] Approximate location parameters for the search. # # @param type [Symbol, :approximate] The type of location approximation. Always `approximate`. - # @see OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate + # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel # @!attribute city # Free text input for the city of the user, e.g. `San Francisco`. @@ -708,7 +693,7 @@ class Approximate < OpenAI::Internal::Type::BaseModel # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate} + # {OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate} # for more details. # # Approximate location parameters for the search. 
diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index b1f9e734..0e18202c 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -38,7 +38,7 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Chat::CompletionListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order, nil] optional :order, enum: -> { OpenAI::Chat::CompletionListParams::Order } # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) @@ -53,7 +53,7 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # # @param model [String] The model used to generate the Chat Completions. # - # @param order [Symbol, OpenAI::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 8292066a..8f2c139c 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -25,7 +25,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
# - # @return [Symbol, OpenAI::Chat::Completions::MessageListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Chat::Completions::MessageListParams::Order } # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) @@ -36,7 +36,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of messages to retrieve. # - # @param order [Symbol, OpenAI::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index e2cf7b8d..6f8732aa 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -37,6 +37,7 @@ module ChatModel GPT_4O_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-search-preview-2025-03-11" GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11 = :"gpt-4o-mini-search-preview-2025-03-11" CHATGPT_4O_LATEST = :"chatgpt-4o-latest" + CODEX_MINI_LATEST = :"codex-mini-latest" GPT_4O_MINI = :"gpt-4o-mini" GPT_4O_MINI_2024_07_18 = :"gpt-4o-mini-2024-07-18" GPT_4_TURBO = :"gpt-4-turbo" diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 2847efa6..a8a32298 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -19,7 +19,7 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # - `lt`: less than # - `lte`: less than or equal # - # @return [Symbol, OpenAI::ComparisonFilter::Type] + # @return [Symbol, OpenAI::Models::ComparisonFilter::Type] required :type, enum: -> { OpenAI::ComparisonFilter::Type } # @!attribute value @@ -30,15 +30,15 @@ 
class ComparisonFilter < OpenAI::Internal::Type::BaseModel required :value, union: -> { OpenAI::ComparisonFilter::Value } # @!method initialize(key:, type:, value:) - # Some parameter documentations has been truncated, see {OpenAI::ComparisonFilter} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ComparisonFilter} for more details. # # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. # # @param key [String] The key to compare against the value. # - # @param type [Symbol, OpenAI::ComparisonFilter::Type] Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. + # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # # @param value [String, Float, Boolean] The value to compare against the attribute key; supports string, number, or bool @@ -51,7 +51,7 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # - `lt`: less than # - `lte`: less than or equal # - # @see OpenAI::ComparisonFilter#type + # @see OpenAI::Models::ComparisonFilter#type module Type extend OpenAI::Internal::Type::Enum @@ -69,7 +69,7 @@ module Type # The value to compare against the attribute key; supports string, number, or # boolean types. 
# - # @see OpenAI::ComparisonFilter#value + # @see OpenAI::Models::ComparisonFilter#value module Value extend OpenAI::Internal::Type::Union @@ -81,10 +81,6 @@ module Value # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 8d33e0b0..e6a702d0 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -15,7 +15,7 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!attribute choices # The list of completion choices the model generated for the input prompt. # - # @return [Array] + # @return [Array] required :choices, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::CompletionChoice] } # @!attribute created @@ -48,19 +48,19 @@ class Completion < OpenAI::Internal::Type::BaseModel # @!attribute usage # Usage statistics for the completion request. # - # @return [OpenAI::CompletionUsage, nil] + # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::CompletionUsage } # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) - # Some parameter documentations has been truncated, see {OpenAI::Completion} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Completion} for more details. # # Represents a completion response from the API. Note: both the streamed and # non-streamed response objects share the same shape (unlike the chat endpoint). # # @param id [String] A unique identifier for the completion. # - # @param choices [Array] The list of completion choices the model generated for the input prompt. + # @param choices [Array] The list of completion choices the model generated for the input prompt. # # @param created [Integer] The Unix timestamp (in seconds) of when the completion was created. 
# @@ -68,7 +68,7 @@ class Completion < OpenAI::Internal::Type::BaseModel # # @param system_fingerprint [String] This fingerprint represents the backend configuration that the model runs with. # - # @param usage [OpenAI::CompletionUsage] Usage statistics for the completion request. + # @param usage [OpenAI::Models::CompletionUsage] Usage statistics for the completion request. # # @param object [Symbol, :text_completion] The object type, which is always "text_completion" end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index c2a91320..07f6b428 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -9,7 +9,7 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. # - # @return [Symbol, OpenAI::CompletionChoice::FinishReason] + # @return [Symbol, OpenAI::Models::CompletionChoice::FinishReason] required :finish_reason, enum: -> { OpenAI::CompletionChoice::FinishReason } # @!attribute index @@ -19,7 +19,7 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # @!attribute logprobs # - # @return [OpenAI::CompletionChoice::Logprobs, nil] + # @return [OpenAI::Models::CompletionChoice::Logprobs, nil] required :logprobs, -> { OpenAI::CompletionChoice::Logprobs }, nil?: true # @!attribute text @@ -28,14 +28,14 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel required :text, String # @!method initialize(finish_reason:, index:, logprobs:, text:) - # Some parameter documentations has been truncated, see {OpenAI::CompletionChoice} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::CompletionChoice} for more details. # - # @param finish_reason [Symbol, OpenAI::CompletionChoice::FinishReason] The reason the model stopped generating tokens. 
This will be `stop` if the model + # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] The reason the model stopped generating tokens. This will be `stop` if the model # # @param index [Integer] # - # @param logprobs [OpenAI::CompletionChoice::Logprobs, nil] + # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] # # @param text [String] @@ -44,7 +44,7 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # number of tokens specified in the request was reached, or `content_filter` if # content was omitted due to a flag from our content filters. # - # @see OpenAI::CompletionChoice#finish_reason + # @see OpenAI::Models::CompletionChoice#finish_reason module FinishReason extend OpenAI::Internal::Type::Enum @@ -56,7 +56,7 @@ module FinishReason # @return [Array] end - # @see OpenAI::CompletionChoice#logprobs + # @see OpenAI::Models::CompletionChoice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel # @!attribute text_offset # diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index bc8a3619..4da42de4 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -16,7 +16,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::CompletionCreateParams::Model] + # @return [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] required :model, union: -> { OpenAI::CompletionCreateParams::Model } # @!attribute prompt @@ -143,7 +143,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute stream_options # Options for streaming response. Only set this when you set `stream: true`. 
# - # @return [OpenAI::Chat::ChatCompletionStreamOptions, nil] + # @return [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] optional :stream_options, -> { OpenAI::Chat::ChatCompletionStreamOptions }, nil?: true # @!attribute suffix @@ -186,7 +186,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::CompletionCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings # @@ -210,7 +210,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param suffix [String, nil] The suffix that comes after a completion of inserted text. 
# @@ -232,11 +232,11 @@ module Model variant String - variant const: -> { OpenAI::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT } + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::GPT_3_5_TURBO_INSTRUCT } - variant const: -> { OpenAI::CompletionCreateParams::Model::DAVINCI_002 } + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::DAVINCI_002 } - variant const: -> { OpenAI::CompletionCreateParams::Model::BABBAGE_002 } + variant const: -> { OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 } # @!method self.variants # @return [Array(String, Symbol)] @@ -265,19 +265,15 @@ module Prompt variant String - variant -> { OpenAI::CompletionCreateParams::Prompt::StringArray } + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::StringArray } - variant -> { OpenAI::CompletionCreateParams::Prompt::IntegerArray } + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::IntegerArray } - variant -> { OpenAI::CompletionCreateParams::Prompt::ArrayOfToken2DArray } + variant -> { OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray } # @!method self.variants # @return [Array(String, Array, Array, Array>)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -297,15 +293,11 @@ module Stop variant String - variant -> { OpenAI::CompletionCreateParams::Stop::StringArray } + variant -> { OpenAI::Models::CompletionCreateParams::Stop::StringArray } # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.nilable(T.any(String, T::Array[String])) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index 
202f9218..8ca37f94 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -24,13 +24,13 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @!attribute completion_tokens_details # Breakdown of tokens used in a completion. # - # @return [OpenAI::CompletionUsage::CompletionTokensDetails, nil] + # @return [OpenAI::Models::CompletionUsage::CompletionTokensDetails, nil] optional :completion_tokens_details, -> { OpenAI::CompletionUsage::CompletionTokensDetails } # @!attribute prompt_tokens_details # Breakdown of tokens used in the prompt. # - # @return [OpenAI::CompletionUsage::PromptTokensDetails, nil] + # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails, nil] optional :prompt_tokens_details, -> { OpenAI::CompletionUsage::PromptTokensDetails } # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) @@ -42,11 +42,11 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # # @param total_tokens [Integer] Total number of tokens used in the request (prompt + completion). # - # @param completion_tokens_details [OpenAI::CompletionUsage::CompletionTokensDetails] Breakdown of tokens used in a completion. + # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] Breakdown of tokens used in a completion. # - # @param prompt_tokens_details [OpenAI::CompletionUsage::PromptTokensDetails] Breakdown of tokens used in the prompt. + # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] Breakdown of tokens used in the prompt. 
- # @see OpenAI::CompletionUsage#completion_tokens_details + # @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that @@ -78,7 +78,7 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) # Some parameter documentations has been truncated, see - # {OpenAI::CompletionUsage::CompletionTokensDetails} for more details. + # {OpenAI::Models::CompletionUsage::CompletionTokensDetails} for more details. # # Breakdown of tokens used in a completion. # @@ -91,7 +91,7 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @param rejected_prediction_tokens [Integer] When using Predicted Outputs, the number of tokens in the end - # @see OpenAI::CompletionUsage#prompt_tokens_details + # @see OpenAI::Models::CompletionUsage#prompt_tokens_details class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute audio_tokens # Audio input tokens present in the prompt. diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 2791671b..e4e7fdcf 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -7,24 +7,24 @@ class CompoundFilter < OpenAI::Internal::Type::BaseModel # Array of filters to combine. Items can be `ComparisonFilter` or # `CompoundFilter`. # - # @return [Array] + # @return [Array] required :filters, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::CompoundFilter::Filter] } # @!attribute type # Type of operation: `and` or `or`. 
# - # @return [Symbol, OpenAI::CompoundFilter::Type] + # @return [Symbol, OpenAI::Models::CompoundFilter::Type] required :type, enum: -> { OpenAI::CompoundFilter::Type } # @!method initialize(filters:, type:) - # Some parameter documentations has been truncated, see {OpenAI::CompoundFilter} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::CompoundFilter} for more details. # # Combine multiple filters using `and` or `or`. # - # @param filters [Array] Array of filters to combine. Items can be `ComparisonFilter` or `CompoundFilter` + # @param filters [Array] Array of filters to combine. Items can be `ComparisonFilter` or `CompoundFilter` # - # @param type [Symbol, OpenAI::CompoundFilter::Type] Type of operation: `and` or `or`. + # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] Type of operation: `and` or `or`. # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. @@ -37,16 +37,12 @@ module Filter variant OpenAI::Internal::Type::Unknown # @!method self.variants - # @return [Array(OpenAI::ComparisonFilter, Object)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ComparisonFilter, T.anything) } - end + # @return [Array(OpenAI::Models::ComparisonFilter, Object)] end # Type of operation: `and` or `or`. 
# - # @see OpenAI::CompoundFilter#type + # @see OpenAI::Models::CompoundFilter#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/container_create_params.rb b/lib/openai/models/container_create_params.rb new file mode 100644 index 00000000..00a41b82 --- /dev/null +++ b/lib/openai/models/container_create_params.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#create + class ContainerCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute name + # Name of the container to create. + # + # @return [String] + required :name, String + + # @!attribute expires_after + # Container expiration time in seconds relative to the 'anchor' time. + # + # @return [OpenAI::Models::ContainerCreateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::ContainerCreateParams::ExpiresAfter } + + # @!attribute file_ids + # IDs of files to copy to the container. + # + # @return [Array, nil] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] + + # @!method initialize(name:, expires_after: nil, file_ids: nil, request_options: {}) + # @param name [String] Name of the container to create. + # + # @param expires_after [OpenAI::Models::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time. + # + # @param file_ids [Array] IDs of files to copy to the container. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Time anchor for the expiration time. Currently only 'last_active_at' is + # supported. 
+ # + # @return [Symbol, OpenAI::Models::ContainerCreateParams::ExpiresAfter::Anchor] + required :anchor, enum: -> { OpenAI::ContainerCreateParams::ExpiresAfter::Anchor } + + # @!attribute minutes + # + # @return [Integer] + required :minutes, Integer + + # @!method initialize(anchor:, minutes:) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerCreateParams::ExpiresAfter} for more details. + # + # Container expiration time in seconds relative to the 'anchor' time. + # + # @param anchor [Symbol, OpenAI::Models::ContainerCreateParams::ExpiresAfter::Anchor] Time anchor for the expiration time. Currently only 'last_active_at' is supporte + # + # @param minutes [Integer] + + # Time anchor for the expiration time. Currently only 'last_active_at' is + # supported. + # + # @see OpenAI::Models::ContainerCreateParams::ExpiresAfter#anchor + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT = :last_active_at + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/container_create_response.rb b/lib/openai/models/container_create_response.rb new file mode 100644 index 00000000..3db15ec9 --- /dev/null +++ b/lib/openai/models/container_create_response.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#create + class ContainerCreateResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the container. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the container was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute name + # Name of the container. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of this object. 
+ # + # @return [String] + required :object, String + + # @!attribute status + # Status of the container (e.g., active, deleted). + # + # @return [String] + required :status, String + + # @!attribute expires_after + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + # + # @return [OpenAI::Models::ContainerCreateResponse::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::Models::ContainerCreateResponse::ExpiresAfter } + + # @!method initialize(id:, created_at:, name:, object:, status:, expires_after: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerCreateResponse} for more details. + # + # @param id [String] Unique identifier for the container. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the container was created. + # + # @param name [String] Name of the container. + # + # @param object [String] The type of this object. + # + # @param status [String] Status of the container (e.g., active, deleted). + # + # @param expires_after [OpenAI::Models::ContainerCreateResponse::ExpiresAfter] The container will expire after this time period. + + # @see OpenAI::Models::ContainerCreateResponse#expires_after + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # The reference point for the expiration. + # + # @return [Symbol, OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor, nil] + optional :anchor, enum: -> { OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor } + + # @!attribute minutes + # The number of minutes after the anchor before the container expires. + # + # @return [Integer, nil] + optional :minutes, Integer + + # @!method initialize(anchor: nil, minutes: nil) + # The container will expire after this time period. The anchor is the reference + # point for the expiration. 
The minutes is the number of minutes after the anchor + # before the container expires. + # + # @param anchor [Symbol, OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor] The reference point for the expiration. + # + # @param minutes [Integer] The number of minutes after the anchor before the container expires. + + # The reference point for the expiration. + # + # @see OpenAI::Models::ContainerCreateResponse::ExpiresAfter#anchor + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT = :last_active_at + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/container_delete_params.rb b/lib/openai/models/container_delete_params.rb new file mode 100644 index 00000000..becd24c9 --- /dev/null +++ b/lib/openai/models/container_delete_params.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#delete + class ContainerDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end +end diff --git a/lib/openai/models/container_list_params.rb b/lib/openai/models/container_list_params.rb new file mode 100644 index 00000000..017dc9cf --- /dev/null +++ b/lib/openai/models/container_list_params.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#list + class ContainerListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute after + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. 
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + # + # @return [String, nil] + optional :after, String + + # @!attribute limit + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!attribute order + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + # + # @return [Symbol, OpenAI::Models::ContainerListParams::Order, nil] + optional :order, enum: -> { OpenAI::ContainerListParams::Order } + + # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::Models::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. 
+ module Order + extend OpenAI::Internal::Type::Enum + + ASC = :asc + DESC = :desc + + # @!method self.values + # @return [Array] + end + end + end +end diff --git a/lib/openai/models/container_list_response.rb b/lib/openai/models/container_list_response.rb new file mode 100644 index 00000000..6d12abc7 --- /dev/null +++ b/lib/openai/models/container_list_response.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#list + class ContainerListResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the container. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the container was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute name + # Name of the container. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of this object. + # + # @return [String] + required :object, String + + # @!attribute status + # Status of the container (e.g., active, deleted). + # + # @return [String] + required :status, String + + # @!attribute expires_after + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + # + # @return [OpenAI::Models::ContainerListResponse::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::Models::ContainerListResponse::ExpiresAfter } + + # @!method initialize(id:, created_at:, name:, object:, status:, expires_after: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerListResponse} for more details. + # + # @param id [String] Unique identifier for the container. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the container was created. + # + # @param name [String] Name of the container. 
+ # + # @param object [String] The type of this object. + # + # @param status [String] Status of the container (e.g., active, deleted). + # + # @param expires_after [OpenAI::Models::ContainerListResponse::ExpiresAfter] The container will expire after this time period. + + # @see OpenAI::Models::ContainerListResponse#expires_after + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # The reference point for the expiration. + # + # @return [Symbol, OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor, nil] + optional :anchor, enum: -> { OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor } + + # @!attribute minutes + # The number of minutes after the anchor before the container expires. + # + # @return [Integer, nil] + optional :minutes, Integer + + # @!method initialize(anchor: nil, minutes: nil) + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + # + # @param anchor [Symbol, OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor] The reference point for the expiration. + # + # @param minutes [Integer] The number of minutes after the anchor before the container expires. + + # The reference point for the expiration. 
+ # + # @see OpenAI::Models::ContainerListResponse::ExpiresAfter#anchor + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT = :last_active_at + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/container_retrieve_params.rb b/lib/openai/models/container_retrieve_params.rb new file mode 100644 index 00000000..821d0549 --- /dev/null +++ b/lib/openai/models/container_retrieve_params.rb @@ -0,0 +1,14 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#retrieve + class ContainerRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end +end diff --git a/lib/openai/models/container_retrieve_response.rb b/lib/openai/models/container_retrieve_response.rb new file mode 100644 index 00000000..19520ab0 --- /dev/null +++ b/lib/openai/models/container_retrieve_response.rb @@ -0,0 +1,98 @@ +# frozen_string_literal: true + +module OpenAI + module Models + # @see OpenAI::Resources::Containers#retrieve + class ContainerRetrieveResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the container. + # + # @return [String] + required :id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the container was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute name + # Name of the container. + # + # @return [String] + required :name, String + + # @!attribute object + # The type of this object. + # + # @return [String] + required :object, String + + # @!attribute status + # Status of the container (e.g., active, deleted). 
+ # + # @return [String] + required :status, String + + # @!attribute expires_after + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + # + # @return [OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter } + + # @!method initialize(id:, created_at:, name:, object:, status:, expires_after: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerRetrieveResponse} for more details. + # + # @param id [String] Unique identifier for the container. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the container was created. + # + # @param name [String] Name of the container. + # + # @param object [String] The type of this object. + # + # @param status [String] Status of the container (e.g., active, deleted). + # + # @param expires_after [OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter] The container will expire after this time period. + + # @see OpenAI::Models::ContainerRetrieveResponse#expires_after + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # The reference point for the expiration. + # + # @return [Symbol, OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor, nil] + optional :anchor, enum: -> { OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor } + + # @!attribute minutes + # The number of minutes after the anchor before the container expires. + # + # @return [Integer, nil] + optional :minutes, Integer + + # @!method initialize(anchor: nil, minutes: nil) + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. 
+ # + # @param anchor [Symbol, OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor] The reference point for the expiration. + # + # @param minutes [Integer] The number of minutes after the anchor before the container expires. + + # The reference point for the expiration. + # + # @see OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter#anchor + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT = :last_active_at + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/containers/file_create_params.rb b/lib/openai/models/containers/file_create_params.rb new file mode 100644 index 00000000..07528c8e --- /dev/null +++ b/lib/openai/models/containers/file_create_params.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#create + class FileCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute file + # The File object (not file name) to be uploaded. + # + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart, nil] + optional :file, OpenAI::Internal::Type::FileInput + + # @!attribute file_id + # Name of the file to create. + # + # @return [String, nil] + optional :file_id, String + + # @!method initialize(file: nil, file_id: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Containers::FileCreateParams} for more details. + # + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. + # + # @param file_id [String] Name of the file to create. 
+ # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/containers/file_create_response.rb b/lib/openai/models/containers/file_create_response.rb new file mode 100644 index 00000000..408ac8c7 --- /dev/null +++ b/lib/openai/models/containers/file_create_response.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#create + class FileCreateResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the file. + # + # @return [String] + required :id, String + + # @!attribute bytes + # Size of the file in bytes. + # + # @return [Integer] + required :bytes, Integer + + # @!attribute container_id + # The container this file belongs to. + # + # @return [String] + required :container_id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the file was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The type of this object (`container.file`). + # + # @return [Symbol, :"container.file"] + required :object, const: :"container.file" + + # @!attribute path + # Path of the file in the container. + # + # @return [String] + required :path, String + + # @!attribute source + # Source of the file (e.g., `user`, `assistant`). + # + # @return [String] + required :source, String + + # @!method initialize(id:, bytes:, container_id:, created_at:, path:, source:, object: :"container.file") + # @param id [String] Unique identifier for the file. + # + # @param bytes [Integer] Size of the file in bytes. + # + # @param container_id [String] The container this file belongs to. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the file was created. + # + # @param path [String] Path of the file in the container. + # + # @param source [String] Source of the file (e.g., `user`, `assistant`). 
+ # + # @param object [Symbol, :"container.file"] The type of this object (`container.file`). + end + end + end +end diff --git a/lib/openai/models/containers/file_delete_params.rb b/lib/openai/models/containers/file_delete_params.rb new file mode 100644 index 00000000..b9865119 --- /dev/null +++ b/lib/openai/models/containers/file_delete_params.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#delete + class FileDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute container_id + # + # @return [String] + required :container_id, String + + # @!method initialize(container_id:, request_options: {}) + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/containers/file_list_params.rb b/lib/openai/models/containers/file_list_params.rb new file mode 100644 index 00000000..a7b2de9b --- /dev/null +++ b/lib/openai/models/containers/file_list_params.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#list + class FileListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute after + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + # + # @return [String, nil] + optional :after, String + + # @!attribute limit + # A limit on the number of objects to be returned. 
Limit can range between 1 and + # 100, and the default is 20. + # + # @return [Integer, nil] + optional :limit, Integer + + # @!attribute order + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + # + # @return [Symbol, OpenAI::Models::Containers::FileListParams::Order, nil] + optional :order, enum: -> { OpenAI::Containers::FileListParams::Order } + + # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Containers::FileListParams} for more details. + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::Models::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + module Order + extend OpenAI::Internal::Type::Enum + + ASC = :asc + DESC = :desc + + # @!method self.values + # @return [Array] + end + end + end + end +end diff --git a/lib/openai/models/containers/file_list_response.rb b/lib/openai/models/containers/file_list_response.rb new file mode 100644 index 00000000..55433ce5 --- /dev/null +++ b/lib/openai/models/containers/file_list_response.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#list + class FileListResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the file. + # + # @return [String] + required :id, String + + # @!attribute bytes + # Size of the file in bytes. 
+ # + # @return [Integer] + required :bytes, Integer + + # @!attribute container_id + # The container this file belongs to. + # + # @return [String] + required :container_id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the file was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The type of this object (`container.file`). + # + # @return [Symbol, :"container.file"] + required :object, const: :"container.file" + + # @!attribute path + # Path of the file in the container. + # + # @return [String] + required :path, String + + # @!attribute source + # Source of the file (e.g., `user`, `assistant`). + # + # @return [String] + required :source, String + + # @!method initialize(id:, bytes:, container_id:, created_at:, path:, source:, object: :"container.file") + # @param id [String] Unique identifier for the file. + # + # @param bytes [Integer] Size of the file in bytes. + # + # @param container_id [String] The container this file belongs to. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the file was created. + # + # @param path [String] Path of the file in the container. + # + # @param source [String] Source of the file (e.g., `user`, `assistant`). + # + # @param object [Symbol, :"container.file"] The type of this object (`container.file`). 
+ end + end + end +end diff --git a/lib/openai/models/containers/file_retrieve_params.rb b/lib/openai/models/containers/file_retrieve_params.rb new file mode 100644 index 00000000..781e8f38 --- /dev/null +++ b/lib/openai/models/containers/file_retrieve_params.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#retrieve + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute container_id + # + # @return [String] + required :container_id, String + + # @!method initialize(container_id:, request_options: {}) + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/containers/file_retrieve_response.rb b/lib/openai/models/containers/file_retrieve_response.rb new file mode 100644 index 00000000..4c905b59 --- /dev/null +++ b/lib/openai/models/containers/file_retrieve_response.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + # @see OpenAI::Resources::Containers::Files#retrieve + class FileRetrieveResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # Unique identifier for the file. + # + # @return [String] + required :id, String + + # @!attribute bytes + # Size of the file in bytes. + # + # @return [Integer] + required :bytes, Integer + + # @!attribute container_id + # The container this file belongs to. + # + # @return [String] + required :container_id, String + + # @!attribute created_at + # Unix timestamp (in seconds) when the file was created. + # + # @return [Integer] + required :created_at, Integer + + # @!attribute object + # The type of this object (`container.file`). 
+ # + # @return [Symbol, :"container.file"] + required :object, const: :"container.file" + + # @!attribute path + # Path of the file in the container. + # + # @return [String] + required :path, String + + # @!attribute source + # Source of the file (e.g., `user`, `assistant`). + # + # @return [String] + required :source, String + + # @!method initialize(id:, bytes:, container_id:, created_at:, path:, source:, object: :"container.file") + # @param id [String] Unique identifier for the file. + # + # @param bytes [Integer] Size of the file in bytes. + # + # @param container_id [String] The container this file belongs to. + # + # @param created_at [Integer] Unix timestamp (in seconds) when the file was created. + # + # @param path [String] Path of the file in the container. + # + # @param source [String] Source of the file (e.g., `user`, `assistant`). + # + # @param object [Symbol, :"container.file"] The type of this object (`container.file`). + end + end + end +end diff --git a/lib/openai/models/containers/files/content_retrieve_params.rb b/lib/openai/models/containers/files/content_retrieve_params.rb new file mode 100644 index 00000000..b765ee47 --- /dev/null +++ b/lib/openai/models/containers/files/content_retrieve_params.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Containers + module Files + # @see OpenAI::Resources::Containers::Files::Content#retrieve + class ContentRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!attribute container_id + # + # @return [String] + required :container_id, String + + # @!method initialize(container_id:, request_options: {}) + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end + end +end diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb 
index ec420899..a7bcbff1 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -7,7 +7,7 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # @!attribute data # The list of embeddings generated by the model. # - # @return [Array] + # @return [Array] required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Embedding] } # @!attribute model @@ -25,19 +25,19 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # @!attribute usage # The usage information for the request. # - # @return [OpenAI::CreateEmbeddingResponse::Usage] + # @return [OpenAI::Models::CreateEmbeddingResponse::Usage] required :usage, -> { OpenAI::CreateEmbeddingResponse::Usage } # @!method initialize(data:, model:, usage:, object: :list) - # @param data [Array] The list of embeddings generated by the model. + # @param data [Array] The list of embeddings generated by the model. # # @param model [String] The name of the model used to generate the embedding. # - # @param usage [OpenAI::CreateEmbeddingResponse::Usage] The usage information for the request. + # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] The usage information for the request. # # @param object [Symbol, :list] The object type, which is always "list". - # @see OpenAI::CreateEmbeddingResponse#usage + # @see OpenAI::Models::CreateEmbeddingResponse#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute prompt_tokens # The number of tokens used by the prompt. diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb index 6f8e6de3..9a511def 100644 --- a/lib/openai/models/embedding.rb +++ b/lib/openai/models/embedding.rb @@ -24,8 +24,8 @@ class Embedding < OpenAI::Internal::Type::BaseModel required :object, const: :embedding # @!method initialize(embedding:, index:, object: :embedding) - # Some parameter documentations has been truncated, see {OpenAI::Embedding} for - # more details. 
+ # Some parameter documentations has been truncated, see + # {OpenAI::Models::Embedding} for more details. # # Represents an embedding vector returned by embedding endpoint. # diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 51f4ffa2..fea9547e 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -28,7 +28,7 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # [Model overview](https://platform.openai.com/docs/models) for descriptions of # them. # - # @return [String, Symbol, OpenAI::EmbeddingModel] + # @return [String, Symbol, OpenAI::Models::EmbeddingModel] required :model, union: -> { OpenAI::EmbeddingCreateParams::Model } # @!attribute dimensions @@ -42,7 +42,7 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). # - # @return [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat, nil] + # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat, nil] optional :encoding_format, enum: -> { OpenAI::EmbeddingCreateParams::EncodingFormat } # @!attribute user @@ -59,11 +59,11 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i # - # @param model [String, Symbol, OpenAI::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. Only suppo # - # @param encoding_format [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. 
Can be either `float` or [`base64`](http + # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -85,21 +85,17 @@ module Input variant String # The array of strings that will be turned into an embedding. - variant -> { OpenAI::EmbeddingCreateParams::Input::StringArray } + variant -> { OpenAI::Models::EmbeddingCreateParams::Input::StringArray } # The array of integers that will be turned into an embedding. - variant -> { OpenAI::EmbeddingCreateParams::Input::IntegerArray } + variant -> { OpenAI::Models::EmbeddingCreateParams::Input::IntegerArray } # The array of arrays containing integers that will be turned into an embedding. - variant -> { OpenAI::EmbeddingCreateParams::Input::ArrayOfToken2DArray } + variant -> { OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray } # @!method self.variants # @return [Array(String, Array, Array, Array>)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[String], T::Array[Integer], T::Array[T::Array[Integer]]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -124,11 +120,7 @@ module Model variant enum: -> { OpenAI::EmbeddingModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::EmbeddingModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::EmbeddingModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)] end # The format to return the embeddings in. 
Can be either `float` or diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index a80e9544..cdf26b6c 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -8,15 +8,19 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel include OpenAI::Internal::Type::RequestParameters # @!attribute data_source_config - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. # - # @return [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] + # @return [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] required :data_source_config, union: -> { OpenAI::EvalCreateParams::DataSourceConfig } # @!attribute testing_criteria - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion] } @@ -41,9 +45,9 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::EvalCreateParams} for more details. 
# - # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -51,7 +55,8 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. module DataSourceConfig extend OpenAI::Internal::Type::Union @@ -68,7 +73,7 @@ module DataSourceConfig variant :logs, -> { OpenAI::EvalCreateParams::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. 
- variant :"stored-completions", -> { OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions } + variant :stored_completions, -> { OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions } class Custom < OpenAI::Internal::Type::BaseModel # @!attribute item_schema @@ -92,7 +97,7 @@ class Custom < OpenAI::Internal::Type::BaseModel # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom) # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::DataSourceConfig::Custom} for more details. + # {OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom} for more details. # # A CustomDataSourceConfig object that defines the schema for the data source used # for the evaluation runs. This schema is used to define the shape of the data @@ -130,12 +135,13 @@ class Logs < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :logs] The type of data source. Always `logs`. end + # @deprecated class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!attribute type - # The type of data source. Always `stored-completions`. + # The type of data source. Always `stored_completions`. # - # @return [Symbol, :"stored-completions"] - required :type, const: :"stored-completions" + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions # @!attribute metadata # Metadata filters for the stored completions data source. @@ -143,26 +149,16 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!method initialize(metadata: nil, type: :"stored-completions") + # @!method initialize(metadata: nil, type: :stored_completions) # Deprecated in favor of LogsDataSourceConfig. # # @param metadata [Hash{Symbol=>Object}] Metadata filters for the stored completions data source. # - # @param type [Symbol, :"stored-completions"] The type of data source. 
Always `stored-completions`. + # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. end # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCreateParams::DataSourceConfig::Custom, - OpenAI::EvalCreateParams::DataSourceConfig::Logs, - OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions - ) - end - end + # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -191,9 +187,9 @@ module TestingCriterion class LabelModel < OpenAI::Internal::Type::BaseModel # @!attribute input # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :input, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input] @@ -231,12 +227,13 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) # Some parameter documentations has been truncated, see - # {OpenAI::EvalCreateParams::TestingCriterion::LabelModel} for more details. + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel} for more + # details. # # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] A list of chat messages forming the prompt or context. 
May include variable refe + # @param input [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param labels [Array] The labels to classify to each item in the evaluation. # @@ -249,7 +246,7 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :label_model] The object type, which is always `label_model`. # A chat message that makes up the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. module Input extend OpenAI::Internal::Type::Union @@ -260,7 +257,7 @@ module Input # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant -> { OpenAI::EvalItem } + variant -> { OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem } class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -281,17 +278,129 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @param role [String] The role of the message (e.g. "system", "assistant", "user"). end - # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem - ) + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. 
+ # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] + required :content, + union: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content + } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] + required :role, + enum: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role + } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type, nil] + optional :type, + enum: -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type + } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role] The role of the message input. 
One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] end end + + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem)] end end @@ -335,19 +444,7 @@ class ScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Graders::StringCheckGrader, OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::EvalCreateParams::TestingCriterion::Python, OpenAI::EvalCreateParams::TestingCriterion::ScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCreateParams::TestingCriterion::LabelModel, - OpenAI::Graders::StringCheckGrader, - OpenAI::EvalCreateParams::TestingCriterion::TextSimilarity, - OpenAI::EvalCreateParams::TestingCriterion::Python, - OpenAI::EvalCreateParams::TestingCriterion::ScoreModel - ) - end - end + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalCreateParams::TestingCriterion::TextSimilarity, OpenAI::Models::EvalCreateParams::TestingCriterion::Python, OpenAI::Models::EvalCreateParams::TestingCriterion::ScoreModel)] end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index 
caf24f3a..64d013fc 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -19,7 +19,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] } @@ -61,19 +61,19 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. 
# # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -95,23 +95,55 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } - # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. 
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. 
end + + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -175,19 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end + # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_custom_data_source_config.rb b/lib/openai/models/eval_custom_data_source_config.rb index 38c0e0ec..5ff8b5db 100644 --- a/lib/openai/models/eval_custom_data_source_config.rb +++ b/lib/openai/models/eval_custom_data_source_config.rb @@ -18,7 +18,7 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(schema:, type: :custom) # Some parameter documentations has been truncated, see - # {OpenAI::EvalCustomDataSourceConfig} for more details. 
+ # {OpenAI::Models::EvalCustomDataSourceConfig} for more details. # # A CustomDataSourceConfig which specifies the schema of your `item` and # optionally `sample` namespaces. The response schema defines the shape of the diff --git a/lib/openai/models/eval_item.rb b/lib/openai/models/eval_item.rb deleted file mode 100644 index b134b33b..00000000 --- a/lib/openai/models/eval_item.rb +++ /dev/null @@ -1,119 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class EvalItem < OpenAI::Internal::Type::BaseModel - # @!attribute content - # Text inputs to the model - can contain template strings. - # - # @return [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText] - required :content, union: -> { OpenAI::EvalItem::Content } - - # @!attribute role - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @return [Symbol, OpenAI::EvalItem::Role] - required :role, enum: -> { OpenAI::EvalItem::Role } - - # @!attribute type - # The type of the message input. Always `message`. - # - # @return [Symbol, OpenAI::EvalItem::Type, nil] - optional :type, enum: -> { OpenAI::EvalItem::Type } - - # @!method initialize(content:, role:, type: nil) - # Some parameter documentations has been truncated, see {OpenAI::EvalItem} for - # more details. - # - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - # - # @param content [String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. - # - # @param role [Symbol, OpenAI::EvalItem::Role] The role of the message input. 
One of `user`, `assistant`, `system`, or - # - # @param type [Symbol, OpenAI::EvalItem::Type] The type of the message input. Always `message`. - - # Text inputs to the model - can contain template strings. - # - # @see OpenAI::EvalItem#content - module Content - extend OpenAI::Internal::Type::Union - - # A text input to the model. - variant String - - # A text input to the model. - variant -> { OpenAI::Responses::ResponseInputText } - - # A text output from the model. - variant -> { OpenAI::EvalItem::Content::OutputText } - - class OutputText < OpenAI::Internal::Type::BaseModel - # @!attribute text - # The text output from the model. - # - # @return [String] - required :text, String - - # @!attribute type - # The type of the output text. Always `output_text`. - # - # @return [Symbol, :output_text] - required :type, const: :output_text - - # @!method initialize(text:, type: :output_text) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalItem::Content::OutputText} for more details. - # - # A text output from the model. - # - # @param text [String] The text output from the model. - # - # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. - end - - # @!method self.variants - # @return [Array(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(String, OpenAI::Responses::ResponseInputText, OpenAI::EvalItem::Content::OutputText) - end - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - # - # @see OpenAI::EvalItem#role - module Role - extend OpenAI::Internal::Type::Enum - - USER = :user - ASSISTANT = :assistant - SYSTEM = :system - DEVELOPER = :developer - - # @!method self.values - # @return [Array] - end - - # The type of the message input. Always `message`. 
- # - # @see OpenAI::EvalItem#type - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE = :message - - # @!method self.values - # @return [Array] - end - end - end -end diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb index c772f07f..ff5f8d60 100644 --- a/lib/openai/models/eval_list_params.rb +++ b/lib/openai/models/eval_list_params.rb @@ -23,14 +23,14 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for # descending order. # - # @return [Symbol, OpenAI::EvalListParams::Order, nil] + # @return [Symbol, OpenAI::Models::EvalListParams::Order, nil] optional :order, enum: -> { OpenAI::EvalListParams::Order } # @!attribute order_by # Evals can be ordered by creation time or last updated time. Use `created_at` for # creation time or `updated_at` for last updated time. # - # @return [Symbol, OpenAI::EvalListParams::OrderBy, nil] + # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy, nil] optional :order_by, enum: -> { OpenAI::EvalListParams::OrderBy } # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) @@ -41,9 +41,9 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of evals to retrieve. # - # @param order [Symbol, OpenAI::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d # - # @param order_by [Symbol, OpenAI::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. 
Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index 577ef0b7..db2de0e9 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -19,7 +19,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalListResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } @@ -61,19 +61,19 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. 
+ # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -95,23 +95,55 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalListResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } - # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). 
+ # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalListResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. 
end + + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -175,19 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end + # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_logs_data_source_config.rb b/lib/openai/models/eval_logs_data_source_config.rb deleted file mode 100644 index 0412bec6..00000000 --- a/lib/openai/models/eval_logs_data_source_config.rb +++ /dev/null @@ -1,47 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel - # @!attribute schema - # The json schema for the run data source items. Learn how to build JSON schemas - # [here](https://json-schema.org/). 
- # - # @return [Hash{Symbol=>Object}] - required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute type - # The type of data source. Always `logs`. - # - # @return [Symbol, :logs] - required :type, const: :logs - - # @!attribute metadata - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - # - # @return [Hash{Symbol=>String}, nil] - optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - - # @!method initialize(schema:, metadata: nil, type: :logs) - # Some parameter documentations has been truncated, see - # {OpenAI::EvalLogsDataSourceConfig} for more details. - # - # A LogsDataSourceConfig which specifies the metadata property of your logs query. - # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The - # schema returned by this data source config is used to defined what variables are - # available in your evals. `item` and `sample` are both defined when using this - # data source config. - # - # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. - # - # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be - # - # @param type [Symbol, :logs] The type of data source. Always `logs`. 
- end - end -end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 0b7c4ad7..04a1e866 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -19,7 +19,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. # - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } @@ -61,19 +61,19 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. 
+ # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. # # @param object [Symbol, :eval] The object type. @@ -95,23 +95,55 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } - # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). 
+ # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. 
end + + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -175,19 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end + # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb index 28bdf315..2a57fdfd 100644 --- a/lib/openai/models/eval_stored_completions_data_source_config.rb +++ b/lib/openai/models/eval_stored_completions_data_source_config.rb @@ -12,10 +12,10 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] # 
@!attribute type - # The type of data source. Always `stored-completions`. + # The type of data source. Always `stored_completions`. # - # @return [Symbol, :"stored-completions"] - required :type, const: :"stored-completions" + # @return [Symbol, :stored_completions] + required :type, const: :stored_completions # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -28,9 +28,9 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!method initialize(schema:, metadata: nil, type: :"stored-completions") + # @!method initialize(schema:, metadata: nil, type: :stored_completions) # Some parameter documentations has been truncated, see - # {OpenAI::EvalStoredCompletionsDataSourceConfig} for more details. + # {OpenAI::Models::EvalStoredCompletionsDataSourceConfig} for more details. # # Deprecated in favor of LogsDataSourceConfig. # @@ -38,7 +38,7 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param type [Symbol, :"stored-completions"] The type of data source. Always `stored-completions`. + # @param type [Symbol, :stored_completions] The type of data source. Always `stored_completions`. end end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index b8357d8e..475374c9 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -19,7 +19,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source_config # Configuration of data sources used in runs of the evaluation. 
# - # @return [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] + # @return [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] required :data_source_config, union: -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig } # @!attribute metadata @@ -48,7 +48,7 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # @!attribute testing_criteria # A list of testing criteria. # - # @return [Array] + # @return [Array] required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } @@ -61,19 +61,19 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o # # @param id [String] Unique identifier for the evaluation. # # @param created_at [Integer] The Unix timestamp (in seconds) for when the eval was created. # - # @param data_source_config [OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] Configuration of data sources used in runs of the evaluation. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String] The name of the evaluation. # - # @param testing_criteria [Array] A list of testing criteria. + # @param testing_criteria [Array] A list of testing criteria. 
# # @param object [Symbol, :eval] The object type. @@ -95,23 +95,55 @@ module DataSourceConfig # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. # The schema returned by this data source config is used to defined what variables are available in your evals. # `item` and `sample` are both defined when using this data source config. - variant :logs, -> { OpenAI::EvalLogsDataSourceConfig } + variant :logs, -> { OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs } # Deprecated in favor of LogsDataSourceConfig. - variant :"stored-completions", -> { OpenAI::EvalStoredCompletionsDataSourceConfig } + variant :stored_completions, -> { OpenAI::EvalStoredCompletionsDataSourceConfig } - # @!method self.variants - # @return [Array(OpenAI::EvalCustomDataSourceConfig, OpenAI::EvalLogsDataSourceConfig, OpenAI::EvalStoredCompletionsDataSourceConfig)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - end + class Logs < OpenAI::Internal::Type::BaseModel + # @!attribute schema + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + # + # @return [Hash{Symbol=>Object}] + required :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute type + # The type of data source. Always `logs`. + # + # @return [Symbol, :logs] + required :type, const: :logs + + # @!attribute metadata + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. 
+ # + # @return [Hash{Symbol=>String}, nil] + optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!method initialize(schema:, metadata: nil, type: :logs) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs} for more details. + # + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + # + # @param schema [Hash{Symbol=>Object}] The json schema for the run data source items. + # + # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be + # + # @param type [Symbol, :logs] The type of data source. Always `logs`. end + + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -175,19 +207,7 @@ class EvalGraderScoreModel < OpenAI::Models::Graders::ScoreModelGrader end # @!method self.variants - # @return [Array(OpenAI::Graders::LabelModelGrader, OpenAI::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, - 
OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel - ) - end - end + # @return [Array(OpenAI::Models::Graders::LabelModelGrader, OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel)] end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 8f17fd52..28f9e688 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -5,20 +5,24 @@ module Models module Evals class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. # - # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] required :source, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source } # @!attribute type # The type of run data source. Always `completions`. 
# - # @return [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] required :type, enum: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type } # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. # - # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference, nil] optional :input_messages, union: -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages @@ -32,41 +36,98 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute sampling_params # - # @return [OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] + # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] optional :sampling_params, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource} for more details. + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource} for more details. # # A CompletionsRunDataSource object describing a model sampling configuration. 
# - # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] A StoredCompletionsRunDataSource configuration describing a set of filters + # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] Determines what populates the `item` namespace in this run's data source. # - # @param type [Symbol, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] The type of run data source. Always `completions`. # - # @param input_messages [OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i # # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). # - # @param sampling_params [OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] + # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. 
# - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#source + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#source module Source extend OpenAI::Internal::Type::Union discriminator :type - variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource } + variant :file_content, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent } - variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource } + variant :file_id, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID } # A StoredCompletionsRunDataSource configuration describing a set of filters variant :stored_completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions } + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] + } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. 
+ # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of source. Always `stored_completions`. @@ -111,7 +172,7 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @!method initialize(created_after: nil, created_before: nil, limit: nil, metadata: nil, model: nil, type: :stored_completions) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions} + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions} # for more details. # # A StoredCompletionsRunDataSource configuration describing a set of filters @@ -130,22 +191,12 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions - ) - end - end + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] end # The type of run data source. Always `completions`. 
# - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#type + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#type module Type extend OpenAI::Internal::Type::Enum @@ -155,7 +206,12 @@ module Type # @return [Array] end - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages module InputMessages extend OpenAI::Internal::Type::Union @@ -169,9 +225,9 @@ module InputMessages class Template < OpenAI::Internal::Type::BaseModel # @!attribute template # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. # - # @return [Array] + # @return [Array] required :template, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template] @@ -185,10 +241,10 @@ class Template < OpenAI::Internal::Type::BaseModel # @!method initialize(template:, type: :template) # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} for - # more details. + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template} + # for more details. # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe # # @param type [Symbol, :template] The type of input messages. Always `template`. 
@@ -214,20 +270,140 @@ module Template # precedence over instructions given with the `user` role. Messages with the # `assistant` role are presumed to have been generated by the model in previous # interactions. - variant :message, -> { OpenAI::EvalItem } + variant :message, + -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + } + + class Message < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] + required :content, + union: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content + } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] + required :role, + enum: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role + } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type, nil] + optional :type, + enum: -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type + } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. 
Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. 
+ # + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end # @!method self.variants - # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) } - end + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message)] end end class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.input_trajectory" # # @return [String] required :item_reference, String @@ -239,25 +415,16 @@ class ItemReference < OpenAI::Internal::Type::BaseModel required :type, const: :item_reference # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.input_trajectory" # # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
end # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference - ) - end - end + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] end - # @see OpenAI::Evals::CreateEvalCompletionsRunDataSource#sampling_params + # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel # @!attribute max_completion_tokens # The maximum number of tokens in the generated output. diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index 3819554d..fc29873a 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -5,8 +5,9 @@ module Models module Evals class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # @!attribute source + # Determines what populates the `item` namespace in the data source. 
# - # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] required :source, union: -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source } # @!attribute type @@ -19,28 +20,81 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # A JsonlRunDataSource object with that specifies a JSONL file that matches the # eval # - # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource] + # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] Determines what populates the `item` namespace in the data source. # # @param type [Symbol, :jsonl] The type of data source. Always `jsonl`. - # @see OpenAI::Evals::CreateEvalJSONLRunDataSource#source + # Determines what populates the `item` namespace in the data source. + # + # @see OpenAI::Models::Evals::CreateEvalJSONLRunDataSource#source module Source extend OpenAI::Internal::Type::Union discriminator :type - variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource } + variant :file_content, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent } - variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource } + variant :file_id, -> { OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID } - # @!method self.variants - # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource)] + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] + } + + # @!attribute type + # The type of jsonl source. 
Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource) + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] end end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. 
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] end end end diff --git a/lib/openai/models/evals/create_eval_responses_run_data_source.rb b/lib/openai/models/evals/create_eval_responses_run_data_source.rb deleted file mode 100644 index 95b9d3c8..00000000 --- a/lib/openai/models/evals/create_eval_responses_run_data_source.rb +++ /dev/null @@ -1,363 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Evals - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - # @!attribute source - # A EvalResponsesSource object describing a run data source configuration. - # - # @return [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses] - required :source, union: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Source } - - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, OpenAI::Evals::CreateEvalResponsesRunDataSource::Type] - required :type, enum: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Type } - - # @!attribute input_messages - # - # @return [OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] - optional :input_messages, union: -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages } - - # @!attribute model - # The name of the model to use for generating completions (e.g. "o3-mini"). 
- # - # @return [String, nil] - optional :model, String - - # @!attribute sampling_params - # - # @return [OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams, nil] - optional :sampling_params, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams } - - # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalResponsesRunDataSource} for more details. - # - # A ResponsesRunDataSource object describing a model sampling configuration. - # - # @param source [OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses] A EvalResponsesSource object describing a run data source configuration. - # - # @param type [Symbol, OpenAI::Evals::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `responses`. - # - # @param input_messages [OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] - # - # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). - # - # @param sampling_params [OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams] - - # A EvalResponsesSource object describing a run data source configuration. - # - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#source - module Source - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :file_content, -> { OpenAI::Evals::EvalJSONLFileContentSource } - - variant :file_id, -> { OpenAI::Evals::EvalJSONLFileIDSource } - - # A EvalResponsesSource object describing a run data source configuration. 
- variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses } - - class Responses < OpenAI::Internal::Type::BaseModel - # @!attribute type - # The type of run data source. Always `responses`. - # - # @return [Symbol, :responses] - required :type, const: :responses - - # @!attribute created_after - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_after, Integer, nil?: true - - # @!attribute created_before - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - # - # @return [Integer, nil] - optional :created_before, Integer, nil?: true - - # @!attribute has_tool_calls - # Whether the response has tool calls. This is a query parameter used to select - # responses. - # - # @return [Boolean, nil] - optional :has_tool_calls, OpenAI::Internal::Type::Boolean, nil?: true - - # @!attribute instructions_search - # Optional string to search the 'instructions' field. This is a query parameter - # used to select responses. - # - # @return [String, nil] - optional :instructions_search, String, nil?: true - - # @!attribute metadata - # Metadata filter for the responses. This is a query parameter used to select - # responses. - # - # @return [Object, nil] - optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true - - # @!attribute model - # The name of the model to find responses for. This is a query parameter used to - # select responses. - # - # @return [String, nil] - optional :model, String, nil?: true - - # @!attribute reasoning_effort - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - # - # @return [Symbol, OpenAI::ReasoningEffort, nil] - optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true - - # @!attribute temperature - # Sampling temperature. 
This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :temperature, Float, nil?: true - - # @!attribute tools - # List of tool names. This is a query parameter used to select responses. - # - # @return [Array, nil] - optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!attribute top_p - # Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @return [Float, nil] - optional :top_p, Float, nil?: true - - # @!attribute users - # List of user identifiers. This is a query parameter used to select responses. - # - # @return [Array, nil] - optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true - - # @!method initialize(created_after: nil, created_before: nil, has_tool_calls: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses} for more - # details. - # - # A EvalResponsesSource object describing a run data source configuration. - # - # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par - # - # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa - # - # @param has_tool_calls [Boolean, nil] Whether the response has tool calls. This is a query parameter used to select re - # - # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us - # - # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp - # - # @param model [String, nil] The name of the model to find responses for. 
This is a query parameter used to s - # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re - # - # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. - # - # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. - # - # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. - # - # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. - # - # @param type [Symbol, :responses] The type of run data source. Always `responses`. - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::EvalJSONLFileContentSource, OpenAI::Evals::EvalJSONLFileIDSource, OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ) - end - end - end - - # The type of run data source. Always `responses`. - # - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#type - module Type - extend OpenAI::Internal::Type::Enum - - RESPONSES = :responses - - # @!method self.values - # @return [Array] - end - - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#input_messages - module InputMessages - extend OpenAI::Internal::Type::Union - - discriminator :type - - variant :template, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template } - - variant :item_reference, - -> { OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference } - - class Template < OpenAI::Internal::Type::BaseModel - # @!attribute template - # A list of chat messages forming the prompt or context. 
May include variable - # references to the "item" namespace, ie {{item.name}}. - # - # @return [Array] - required :template, - -> { - OpenAI::Internal::Type::ArrayOf[union: OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template] - } - - # @!attribute type - # The type of input messages. Always `template`. - # - # @return [Symbol, :template] - required :type, const: :template - - # @!method initialize(template:, type: :template) - # Some parameter documentations has been truncated, see - # {OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template} for - # more details. - # - # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe - # - # @param type [Symbol, :template] The type of input messages. Always `template`. - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - variant -> { - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - } - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - variant -> { OpenAI::EvalItem } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the message. - # - # @return [String] - required :content, String - - # @!attribute role - # The role of the message (e.g. "system", "assistant", "user"). 
- # - # @return [String] - required :role, String - - # @!method initialize(content:, role:) - # @param content [String] The content of the message. - # - # @param role [String] The role of the message (e.g. "system", "assistant", "user"). - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::EvalItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - end - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - # @!attribute item_reference - # A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @return [String] - required :item_reference, String - - # @!attribute type - # The type of input messages. Always `item_reference`. - # - # @return [Symbol, :item_reference] - required :type, const: :item_reference - - # @!method initialize(item_reference:, type: :item_reference) - # @param item_reference [String] A reference to a variable in the "item" namespace. Ie, "item.name" - # - # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. - end - - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - end - end - end - - # @see OpenAI::Evals::CreateEvalResponsesRunDataSource#sampling_params - class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute max_completion_tokens - # The maximum number of tokens in the generated output. 
- # - # @return [Integer, nil] - optional :max_completion_tokens, Integer - - # @!attribute seed - # A seed value to initialize the randomness, during sampling. - # - # @return [Integer, nil] - optional :seed, Integer - - # @!attribute temperature - # A higher temperature increases randomness in the outputs. - # - # @return [Float, nil] - optional :temperature, Float - - # @!attribute top_p - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - # - # @return [Float, nil] - optional :top_p, Float - - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) - # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. - # - # @param seed [Integer] A seed value to initialize the randomness, during sampling. - # - # @param temperature [Float] A higher temperature increases randomness in the outputs. - # - # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - end - end - end - end -end diff --git a/lib/openai/models/evals/eval_jsonl_file_content_source.rb b/lib/openai/models/evals/eval_jsonl_file_content_source.rb deleted file mode 100644 index add2daad..00000000 --- a/lib/openai/models/evals/eval_jsonl_file_content_source.rb +++ /dev/null @@ -1,45 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Evals - class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel - # @!attribute content - # The content of the jsonl file. - # - # @return [Array] - required :content, - -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::EvalJSONLFileContentSource::Content] } - - # @!attribute type - # The type of jsonl source. Always `file_content`. - # - # @return [Symbol, :file_content] - required :type, const: :file_content - - # @!method initialize(content:, type: :file_content) - # @param content [Array] The content of the jsonl file. 
- # - # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. - - class Content < OpenAI::Internal::Type::BaseModel - # @!attribute item - # - # @return [Hash{Symbol=>Object}] - required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!attribute sample - # - # @return [Hash{Symbol=>Object}, nil] - optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - - # @!method initialize(item:, sample: nil) - # @param item [Hash{Symbol=>Object}] - # @param sample [Hash{Symbol=>Object}] - end - end - end - - EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource - end -end diff --git a/lib/openai/models/evals/eval_jsonl_file_id_source.rb b/lib/openai/models/evals/eval_jsonl_file_id_source.rb deleted file mode 100644 index 3347ee13..00000000 --- a/lib/openai/models/evals/eval_jsonl_file_id_source.rb +++ /dev/null @@ -1,28 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Evals - class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel - # @!attribute id - # The identifier of the file. - # - # @return [String] - required :id, String - - # @!attribute type - # The type of jsonl source. Always `file_id`. - # - # @return [Symbol, :file_id] - required :type, const: :file_id - - # @!method initialize(id:, type: :file_id) - # @param id [String] The identifier of the file. - # - # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. - end - end - - EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource - end -end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 3e7dacc0..fd8642a6 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -20,13 +20,13 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. 
# - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses] Information about the run's data source. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -145,20 +145,496 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. 
- variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # Determines what populates the `item` namespace in this run's data source. + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). 
+ # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. + # + # @param input_messages [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + + # Determines what populates the `item` namespace in this run's data source. 
+ # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. 
+ # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. 
+ # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. 
This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses)] + end - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
+ # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ # + # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. 
+ # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. 
Always `message`. + # + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. 
+ # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 7437852c..bc703e7f 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -11,7 +11,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Details about the run's data source. 
# - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] required :data_source, union: -> { OpenAI::Evals::RunCreateParams::DataSource } # @!attribute metadata @@ -35,7 +35,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateParams} for more details. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -54,20 +54,547 @@ module DataSource variant -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. 
- variant -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant -> { OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource } - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + # @!attribute source + # Determines what populates the `item` namespace in this run's data source. + # + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] + required :source, + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source + } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] + required :type, + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type + } + + # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
+ # + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages + } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, nil] + optional :sampling_params, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + + # @!method initialize(source:, type:, input_messages: nil, model: nil, sampling_params: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource} + # for more details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses] Determines what populates the `item` namespace in this run's data source. + # + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type] The type of run data source. Always `responses`. 
+ # + # @param input_messages [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams] + + # Determines what populates the `item` namespace in this run's data source. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent + } + + variant :file_id, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID + } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] + } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. 
+ + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. 
+ # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). 
This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses)] + end + + # The type of run data source. Always `responses`. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#type + module Type + extend OpenAI::Internal::Type::Enum + + RESPONSES = :responses + + # @!method self.values + # @return [Array] + end + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. 
Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template + } + + variant :item_reference, + -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> do + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template + ] + end + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. 
Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content + } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ # + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role + } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type + } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role] The role of the message input. 
One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. 
One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. 
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
end end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource)] end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index cc42d18c..3a110d09 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -20,13 +20,13 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses] Information about the run's data source. 
# - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -145,20 +145,496 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # Determines what populates the `item` namespace in this run's data source. + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
+ # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. + # + # @param input_messages [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). 
+ # + # @param sampling_params [OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + + # Determines what populates the `item` namespace in this run's data source. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. 
+ + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. 
+ # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). 
This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses)] + end - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + # Used when sampling from a model. 
Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ # + # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. 
+ # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. 
Always `message`. + # + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. 
+ # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index bfb9d83f..3e0a45b3 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -24,14 +24,14 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Evals::RunListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Evals::RunListParams::Order } # @!attribute status # Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # | `canceled`. 
# - # @return [Symbol, OpenAI::Evals::RunListParams::Status, nil] + # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status, nil] optional :status, enum: -> { OpenAI::Evals::RunListParams::Status } # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) @@ -42,9 +42,9 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of runs to retrieve. # - # @param order [Symbol, OpenAI::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de # - # @param status [Symbol, OpenAI::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index 846ca767..83907899 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -20,13 +20,13 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. 
# - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses] Information about the run's data source. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -145,20 +145,496 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. 
- variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # Determines what populates the `item` namespace in this run's data source. + # + # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # + # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). 
+ # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. + # + # @param source [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. + # + # @param input_messages [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + + # Determines what populates the `item` namespace in this run's data source. 
+ # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. 
+ # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. 
+ # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. 
This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses)] + end - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
+ # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template] } + + # @!attribute type + # The type of input messages. Always `template`. + # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ # + # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. 
+ # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. 
+ # + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. Always `item_reference`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunListResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. 
+ # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. end end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunListResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 60f06879..9db0bb26 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -20,13 +20,13 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @!attribute data_source # Information about the run's data source. # - # @return [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] + # @return [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] required :data_source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource } # @!attribute error # An object representing an error response from the Eval API. 
# - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute eval_id @@ -106,9 +106,9 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] Unix timestamp (in seconds) when the evaluation run was created. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Information about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses] Information about the run's data source. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param eval_id [String] The identifier of the associated evaluation. # @@ -145,20 +145,500 @@ module DataSource variant :completions, -> { OpenAI::Evals::CreateEvalCompletionsRunDataSource } # A ResponsesRunDataSource object describing a model sampling configuration. - variant :responses, -> { OpenAI::Evals::CreateEvalResponsesRunDataSource } + variant :responses, -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses } + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute source + # Determines what populates the `item` namespace in this run's data source. 
+ # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses] + required :source, union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source } + + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute input_messages + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference, nil] + optional :input_messages, + union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages } + + # @!attribute model + # The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @return [String, nil] + optional :model, String + + # @!attribute sampling_params + # + # @return [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams, nil] + optional :sampling_params, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams } + + # @!method initialize(source:, input_messages: nil, model: nil, sampling_params: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses} for more + # details. + # + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ # + # @param source [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses] Determines what populates the `item` namespace in this run's data source. + # + # @param input_messages [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference] Used when sampling from a model. Dictates the structure of the messages passed i + # + # @param model [String] The name of the model to use for generating completions (e.g. "o3-mini"). + # + # @param sampling_params [OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams] + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. + + # Determines what populates the `item` namespace in this run's data source. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#source + module Source + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :file_content, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent } + + variant :file_id, -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID } + + # A EvalResponsesSource object describing a run data source configuration. + variant :responses, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses } + + class FileContent < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the jsonl file. + # + # @return [Array] + required :content, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content] } + + # @!attribute type + # The type of jsonl source. 
Always `file_content`. + # + # @return [Symbol, :file_content] + required :type, const: :file_content + + # @!method initialize(content:, type: :file_content) + # @param content [Array] The content of the jsonl file. + # + # @param type [Symbol, :file_content] The type of jsonl source. Always `file_content`. + + class Content < OpenAI::Internal::Type::BaseModel + # @!attribute item + # + # @return [Hash{Symbol=>Object}] + required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!attribute sample + # + # @return [Hash{Symbol=>Object}, nil] + optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The identifier of the file. + # + # @return [String] + required :id, String + + # @!attribute type + # The type of jsonl source. Always `file_id`. + # + # @return [Symbol, :file_id] + required :type, const: :file_id + + # @!method initialize(id:, type: :file_id) + # @param id [String] The identifier of the file. + # + # @param type [Symbol, :file_id] The type of jsonl source. Always `file_id`. + end + + class Responses < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of run data source. Always `responses`. + # + # @return [Symbol, :responses] + required :type, const: :responses + + # @!attribute created_after + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + # + # @return [Integer, nil] + optional :created_after, Integer, nil?: true + + # @!attribute created_before + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. 
+ # + # @return [Integer, nil] + optional :created_before, Integer, nil?: true + + # @!attribute instructions_search + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + # + # @return [String, nil] + optional :instructions_search, String, nil?: true + + # @!attribute metadata + # Metadata filter for the responses. This is a query parameter used to select + # responses. + # + # @return [Object, nil] + optional :metadata, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute model + # The name of the model to find responses for. This is a query parameter used to + # select responses. + # + # @return [String, nil] + optional :model, String, nil?: true + + # @!attribute reasoning_effort + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + # + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] + optional :reasoning_effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true + + # @!attribute temperature + # Sampling temperature. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :temperature, Float, nil?: true + + # @!attribute tools + # List of tool names. This is a query parameter used to select responses. + # + # @return [Array, nil] + optional :tools, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!attribute top_p + # Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @return [Float, nil] + optional :top_p, Float, nil?: true + + # @!attribute users + # List of user identifiers. This is a query parameter used to select responses. 
+ # + # @return [Array, nil] + optional :users, OpenAI::Internal::Type::ArrayOf[String], nil?: true + + # @!method initialize(created_after: nil, created_before: nil, instructions_search: nil, metadata: nil, model: nil, reasoning_effort: nil, temperature: nil, tools: nil, top_p: nil, users: nil, type: :responses) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses} + # for more details. + # + # A EvalResponsesSource object describing a run data source configuration. + # + # @param created_after [Integer, nil] Only include items created after this timestamp (inclusive). This is a query par + # + # @param created_before [Integer, nil] Only include items created before this timestamp (inclusive). This is a query pa + # + # @param instructions_search [String, nil] Optional string to search the 'instructions' field. This is a query parameter us + # + # @param metadata [Object, nil] Metadata filter for the responses. This is a query parameter used to select resp + # + # @param model [String, nil] The name of the model to find responses for. This is a query parameter used to s + # + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Optional reasoning effort parameter. This is a query parameter used to select re + # + # @param temperature [Float, nil] Sampling temperature. This is a query parameter used to select responses. + # + # @param tools [Array, nil] List of tool names. This is a query parameter used to select responses. + # + # @param top_p [Float, nil] Nucleus sampling parameter. This is a query parameter used to select responses. + # + # @param users [Array, nil] List of user identifiers. This is a query parameter used to select responses. + # + # @param type [Symbol, :responses] The type of run data source. Always `responses`. 
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses)] + end - # @!method self.variants - # @return [Array(OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#input_messages + module InputMessages + extend OpenAI::Internal::Type::Union + + discriminator :type + + variant :template, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template } + + variant :item_reference, + -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference } + + class Template < OpenAI::Internal::Type::BaseModel + # @!attribute template + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + # + # @return [Array] + required :template, + -> do + OpenAI::Internal::Type::ArrayOf[ + union: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template + ] + end + + # @!attribute type + # The type of input messages. Always `template`. 
+ # + # @return [Symbol, :template] + required :type, const: :template + + # @!method initialize(template:, type: :template) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template} + # for more details. + # + # @param template [Array] A list of chat messages forming the prompt or context. May include variable refe + # + # @param type [Symbol, :template] The type of input messages. Always `template`. + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage } + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + # @!attribute content + # The content of the message. + # + # @return [String] + required :content, String + + # @!attribute role + # The role of the message (e.g. "system", "assistant", "user"). + # + # @return [String] + required :role, String + + # @!method initialize(content:, role:) + # @param content [String] The content of the message. + # + # @param role [String] The role of the message (e.g. "system", "assistant", "user"). 
+ end + + class EvalItem < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] + required :content, + union: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] + required :role, + enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type, nil] + optional :type, + enum: -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem} + # for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText} + # for more details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. 
+ # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem)] + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + # @!attribute item_reference + # A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @return [String] + required :item_reference, String + + # @!attribute type + # The type of input messages. Always `item_reference`. + # + # @return [Symbol, :item_reference] + required :type, const: :item_reference + + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] A reference to a variable in the `item` namespace. Ie, "item.name" + # + # @param type [Symbol, :item_reference] The type of input messages. 
Always `item_reference`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference)] + end + + # @see OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses#sampling_params + class SamplingParams < OpenAI::Internal::Type::BaseModel + # @!attribute max_completion_tokens + # The maximum number of tokens in the generated output. + # + # @return [Integer, nil] + optional :max_completion_tokens, Integer + + # @!attribute seed + # A seed value to initialize the randomness, during sampling. + # + # @return [Integer, nil] + optional :seed, Integer + + # @!attribute temperature + # A higher temperature increases randomness in the outputs. + # + # @return [Float, nil] + optional :temperature, Float + + # @!attribute top_p + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + # + # @return [Float, nil] + optional :top_p, Float + + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] The maximum number of tokens in the generated output. + # + # @param seed [Integer] A seed value to initialize the randomness, during sampling. + # + # @param temperature [Float] A higher temperature increases randomness in the outputs. + # + # @param top_p [Float] An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
end end + + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index 0d0e6406..7fd27487 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -30,14 +30,14 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. # - # @return [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Evals::Runs::OutputItemListParams::Order } # @!attribute status # Filter output items by status. Use `failed` to filter by failed output items or # `pass` to filter by passed output items. # - # @return [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status, nil] + # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status, nil] optional :status, enum: -> { OpenAI::Evals::Runs::OutputItemListParams::Status } # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) @@ -50,9 +50,9 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of output items to retrieve. # - # @param order [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order] Sort order for output items by timestamp. Use `asc` for ascending order or `desc + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Sort order for output items by timestamp. 
Use `asc` for ascending order or `desc # - # @param status [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status] Filter output items by status. Use `failed` to filter by failed output + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Filter output items by status. Use `failed` to filter by failed output # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb index 85505173..d3271c97 100644 --- a/lib/openai/models/evals/runs/output_item_list_response.rb +++ b/lib/openai/models/evals/runs/output_item_list_response.rb @@ -95,7 +95,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # @!attribute error # An object representing an error response from the Eval API. # - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute finish_reason @@ -160,7 +160,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # # A sample containing the input and output of the evaluation run. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param finish_reason [String] The reason why the sample generation was finished. # diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb index 5a75e4a0..e43f1fcf 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb @@ -95,7 +95,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # @!attribute error # An object representing an error response from the Eval API. 
# - # @return [OpenAI::Evals::EvalAPIError] + # @return [OpenAI::Models::Evals::EvalAPIError] required :error, -> { OpenAI::Evals::EvalAPIError } # @!attribute finish_reason @@ -161,7 +161,7 @@ class Sample < OpenAI::Internal::Type::BaseModel # # A sample containing the input and output of the evaluation run. # - # @param error [OpenAI::Evals::EvalAPIError] An object representing an error response from the Eval API. + # @param error [OpenAI::Models::Evals::EvalAPIError] An object representing an error response from the Eval API. # # @param finish_reason [String] The reason why the sample generation was finished. # diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 117b02fa..9d196745 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -14,13 +14,7 @@ module FileChunkingStrategy variant :other, -> { OpenAI::OtherFileChunkingStrategyObject } # @!method self.variants - # @return [Array(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject) - end - end + # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)] end end end diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index c70c3336..8f756013 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -16,13 +16,7 @@ module FileChunkingStrategyParam variant :static, -> { OpenAI::StaticFileChunkingStrategyObjectParam } # @!method self.variants - # @return [Array(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - 
T.any(OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam) - end - end + # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)] end end end diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index c88d1052..44e48264 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -10,7 +10,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute file # The File object (not file name) to be uploaded. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :file, OpenAI::Internal::Type::FileInput # @!attribute purpose @@ -19,16 +19,16 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # fine-tuning - `vision`: Images used for vision fine-tuning - `user_data`: # Flexible file type for any purpose - `evals`: Used for eval data sets # - # @return [Symbol, OpenAI::FilePurpose] + # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::FilePurpose } # @!method initialize(file:, purpose:, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::FileCreateParams} for more details. # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. 
One of: - `assistants`: Used in the A # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 82ca75f1..193eebac 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -27,7 +27,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::FileListParams::Order, nil] + # @return [Symbol, OpenAI::Models::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::FileListParams::Order } # @!attribute purpose @@ -44,7 +44,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param purpose [String] Only return files with the given purpose. # diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index ec5c9839..ac42a195 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -39,7 +39,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. # - # @return [Symbol, OpenAI::FileObject::Purpose] + # @return [Symbol, OpenAI::Models::FileObject::Purpose] required :purpose, enum: -> { OpenAI::FileObject::Purpose } # @!attribute status @@ -48,7 +48,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. 
# - # @return [Symbol, OpenAI::FileObject::Status] + # @return [Symbol, OpenAI::Models::FileObject::Status] required :status, enum: -> { OpenAI::FileObject::Status } # @!attribute expires_at @@ -67,8 +67,8 @@ class FileObject < OpenAI::Internal::Type::BaseModel optional :status_details, String # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) - # Some parameter documentations has been truncated, see {OpenAI::FileObject} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::FileObject} for more details. # # The `File` object represents a document that has been uploaded to OpenAI. # @@ -80,9 +80,9 @@ class FileObject < OpenAI::Internal::Type::BaseModel # # @param filename [String] The name of the file. # - # @param purpose [Symbol, OpenAI::FileObject::Purpose] The intended purpose of the file. Supported values are `assistants`, `assistants + # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] The intended purpose of the file. Supported values are `assistants`, `assistants # - # @param status [Symbol, OpenAI::FileObject::Status] Deprecated. The current status of the file, which can be either `uploaded`, `pro + # @param status [Symbol, OpenAI::Models::FileObject::Status] Deprecated. The current status of the file, which can be either `uploaded`, `pro # # @param expires_at [Integer] The Unix timestamp (in seconds) for when the file will expire. # @@ -94,7 +94,7 @@ class FileObject < OpenAI::Internal::Type::BaseModel # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` # and `vision`. # - # @see OpenAI::FileObject#purpose + # @see OpenAI::Models::FileObject#purpose module Purpose extend OpenAI::Internal::Type::Enum @@ -115,7 +115,7 @@ module Purpose # Deprecated. The current status of the file, which can be either `uploaded`, # `processed`, or `error`. 
# - # @see OpenAI::FileObject#status + # @see OpenAI::Models::FileObject#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb index e425c638..0dfe4ffc 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_run_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_run_params.rb @@ -12,7 +12,7 @@ class GraderRunParams < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. # - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] required :grader, union: -> { OpenAI::FineTuning::Alpha::GraderRunParams::Grader } # @!attribute model_sample @@ -31,7 +31,7 @@ class GraderRunParams < OpenAI::Internal::Type::BaseModel } # @!method initialize(grader:, model_sample:, reference_answer:, request_options: {}) - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # # @param model_sample [String] The model sample to be evaluated. 
# @@ -61,19 +61,7 @@ module Grader variant :multi, -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end # The reference answer for the evaluation. @@ -84,17 +72,13 @@ module ReferenceAnswer variant OpenAI::Internal::Type::Unknown - variant -> { OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::UnionMember2Array } + variant -> { OpenAI::Models::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::UnionMember2Array } variant Float # @!method self.variants # @return [Array(String, Object, Array, Float)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T.anything, T::Array[T.anything], Float) } - end - # @type [OpenAI::Internal::Type::Converter] UnionMember2Array = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb index 6ec580aa..fb0650a0 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_params.rb @@ -12,11 +12,11 @@ class GraderValidateParams < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. 
# - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] required :grader, union: -> { OpenAI::FineTuning::Alpha::GraderValidateParams::Grader } # @!method initialize(grader:, request_options: {}) - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -40,19 +40,7 @@ module Grader variant -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end end end diff --git a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb index f0c28dcc..9d7458fc 100644 --- a/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb +++ b/lib/openai/models/fine_tuning/alpha/grader_validate_response.rb @@ -9,11 +9,11 @@ class GraderValidateResponse < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. 
# - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader, nil] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader, nil] optional :grader, union: -> { OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader } # @!method initialize(grader: nil) - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # The grader used for the fine-tuning job. 
# @@ -37,19 +37,7 @@ module Grader variant -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index 9bdaffeb..d49a0e2d 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -24,7 +24,7 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # @!attribute order # The order in which to retrieve permissions. # - # @return [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] + # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] optional :order, enum: -> { OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order } # @!attribute project_id @@ -38,7 +38,7 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] Number of permissions to retrieve. # - # @param order [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. 
+ # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. # # @param project_id [String] The ID of the project to get permissions for. # diff --git a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb index 54b87256..e5dd0bfc 100644 --- a/lib/openai/models/fine_tuning/dpo_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/dpo_hyperparameters.rb @@ -35,7 +35,7 @@ class DpoHyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::DpoHyperparameters} for more details. + # {OpenAI::Models::FineTuning::DpoHyperparameters} for more details. # # The hyperparameters used for the DPO fine-tuning job. # @@ -50,7 +50,7 @@ class DpoHyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::DpoHyperparameters#batch_size + # @see OpenAI::Models::FineTuning::DpoHyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -60,16 +60,12 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. 
# - # @see OpenAI::FineTuning::DpoHyperparameters#beta + # @see OpenAI::Models::FineTuning::DpoHyperparameters#beta module Beta extend OpenAI::Internal::Type::Union @@ -79,16 +75,12 @@ module Beta # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::DpoHyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::DpoHyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -98,16 +90,12 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::DpoHyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::DpoHyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -117,10 +105,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end end diff --git a/lib/openai/models/fine_tuning/dpo_method.rb b/lib/openai/models/fine_tuning/dpo_method.rb index 023cf72f..57bfe306 100644 --- a/lib/openai/models/fine_tuning/dpo_method.rb +++ b/lib/openai/models/fine_tuning/dpo_method.rb @@ -7,13 +7,13 @@ class DpoMethod < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the DPO fine-tuning job. 
# - # @return [OpenAI::FineTuning::DpoHyperparameters, nil] + # @return [OpenAI::Models::FineTuning::DpoHyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::DpoHyperparameters } # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # - # @param hyperparameters [OpenAI::FineTuning::DpoHyperparameters] The hyperparameters used for the DPO fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::DpoHyperparameters] The hyperparameters used for the DPO fine-tuning job. end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 189aabae..64df4360 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -21,7 +21,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. # - # @return [OpenAI::FineTuning::FineTuningJob::Error, nil] + # @return [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] required :error, -> { OpenAI::FineTuning::FineTuningJob::Error }, nil?: true # @!attribute fine_tuned_model @@ -42,7 +42,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. # - # @return [OpenAI::FineTuning::FineTuningJob::Hyperparameters] + # @return [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] required :hyperparameters, -> { OpenAI::FineTuning::FineTuningJob::Hyperparameters } # @!attribute model @@ -81,7 +81,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. 
# - # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Status] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] required :status, enum: -> { OpenAI::FineTuning::FineTuningJob::Status } # @!attribute trained_tokens @@ -116,7 +116,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute integrations # A list of integrations to enable for this fine-tuning job. # - # @return [Array, nil] + # @return [Array, nil] optional :integrations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject] @@ -137,12 +137,12 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @!attribute method_ # The method used for fine-tuning. # - # @return [OpenAI::FineTuning::FineTuningJob::Method, nil] + # @return [OpenAI::Models::FineTuning::FineTuningJob::Method, nil] optional :method_, -> { OpenAI::FineTuning::FineTuningJob::Method }, api_name: :method # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob} for more details. + # {OpenAI::Models::FineTuning::FineTuningJob} for more details. # # The `fine_tuning.job` object represents a fine-tuning job that has been created # through the API. @@ -151,13 +151,13 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. 
# - # @param error [OpenAI::FineTuning::FineTuningJob::Error, nil] For fine-tuning jobs that have `failed`, this will contain more information on t + # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] For fine-tuning jobs that have `failed`, this will contain more information on t # # @param fine_tuned_model [String, nil] The name of the fine-tuned model that is being created. The value will be null i # # @param finished_at [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job was finished. The v # - # @param hyperparameters [OpenAI::FineTuning::FineTuningJob::Hyperparameters] The hyperparameters used for the fine-tuning job. This value will only be return + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] The hyperparameters used for the fine-tuning job. This value will only be return # # @param model [String] The base model that is being fine-tuned. # @@ -167,7 +167,7 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @param seed [Integer] The seed used for the fine-tuning job. # - # @param status [Symbol, OpenAI::FineTuning::FineTuningJob::Status] The current status of the fine-tuning job, which can be either `validating_files + # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] The current status of the fine-tuning job, which can be either `validating_files # # @param trained_tokens [Integer, nil] The total number of billable tokens processed by this fine-tuning job. The value # @@ -177,15 +177,15 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @param estimated_finish [Integer, nil] The Unix timestamp (in seconds) for when the fine-tuning job is estimated to fin # - # @param integrations [Array, nil] A list of integrations to enable for this fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for this fine-tuning job. 
# # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::FineTuning::FineTuningJob::Method] The method used for fine-tuning. + # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] The method used for fine-tuning. # # @param object [Symbol, :"fine_tuning.job"] The object type, which is always "fine_tuning.job". - # @see OpenAI::FineTuning::FineTuningJob#error + # @see OpenAI::Models::FineTuning::FineTuningJob#error class Error < OpenAI::Internal::Type::BaseModel # @!attribute code # A machine-readable error code. @@ -208,7 +208,7 @@ class Error < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:, param:) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob::Error} for more details. + # {OpenAI::Models::FineTuning::FineTuningJob::Error} for more details. # # For fine-tuning jobs that have `failed`, this will contain more information on # the cause of the failure. @@ -220,7 +220,7 @@ class Error < OpenAI::Internal::Type::BaseModel # @param param [String, nil] The parameter that was invalid, usually `training_file` or `validation_file`. Th end - # @see OpenAI::FineTuning::FineTuningJob#hyperparameters + # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model @@ -248,7 +248,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJob::Hyperparameters} for more details. + # {OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. 
This value will only be # returned when running `supervised` jobs. @@ -262,7 +262,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#batch_size + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -274,16 +274,12 @@ module BatchSize # @!method self.variants # @return [Array(Object, Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.nilable(T.any(T.anything, Symbol, Integer)) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -293,16 +289,12 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::FineTuningJob::Hyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -312,17 +304,13 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end # The current status of the fine-tuning job, which can be either # `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`. 
# - # @see OpenAI::FineTuning::FineTuningJob#status + # @see OpenAI::Models::FineTuning::FineTuningJob#status module Status extend OpenAI::Internal::Type::Enum @@ -337,46 +325,46 @@ module Status # @return [Array] end - # @see OpenAI::FineTuning::FineTuningJob#method_ + # @see OpenAI::Models::FineTuning::FineTuningJob#method_ class Method < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @return [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] required :type, enum: -> { OpenAI::FineTuning::FineTuningJob::Method::Type } # @!attribute dpo # Configuration for the DPO fine-tuning method. # - # @return [OpenAI::FineTuning::DpoMethod, nil] + # @return [OpenAI::Models::FineTuning::DpoMethod, nil] optional :dpo, -> { OpenAI::FineTuning::DpoMethod } # @!attribute reinforcement # Configuration for the reinforcement fine-tuning method. # - # @return [OpenAI::FineTuning::ReinforcementMethod, nil] + # @return [OpenAI::Models::FineTuning::ReinforcementMethod, nil] optional :reinforcement, -> { OpenAI::FineTuning::ReinforcementMethod } # @!attribute supervised # Configuration for the supervised fine-tuning method. # - # @return [OpenAI::FineTuning::SupervisedMethod, nil] + # @return [OpenAI::Models::FineTuning::SupervisedMethod, nil] optional :supervised, -> { OpenAI::FineTuning::SupervisedMethod } # @!method initialize(type:, dpo: nil, reinforcement: nil, supervised: nil) # The method used for fine-tuning. # - # @param type [Symbol, OpenAI::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @param dpo [OpenAI::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. 
+ # @param dpo [OpenAI::Models::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. # - # @param reinforcement [OpenAI::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. + # @param reinforcement [OpenAI::Models::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. # - # @param supervised [OpenAI::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. + # @param supervised [OpenAI::Models::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @see OpenAI::FineTuning::FineTuningJob::Method#type + # @see OpenAI::Models::FineTuning::FineTuningJob::Method#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index 743df8cc..d609e035 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -20,7 +20,7 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!attribute level # The log level of the event. # - # @return [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Level] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] required :level, enum: -> { OpenAI::FineTuning::FineTuningJobEvent::Level } # @!attribute message @@ -44,7 +44,7 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of event. 
# - # @return [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Type, nil] + # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type, nil] optional :type, enum: -> { OpenAI::FineTuning::FineTuningJobEvent::Type } # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") @@ -54,19 +54,19 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the fine-tuning job was created. # - # @param level [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Level] The log level of the event. + # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] The log level of the event. # # @param message [String] The message of the event. # # @param data [Object] The data associated with the event. # - # @param type [Symbol, OpenAI::FineTuning::FineTuningJobEvent::Type] The type of event. + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] The type of event. # # @param object [Symbol, :"fine_tuning.job.event"] The object type, which is always "fine_tuning.job.event". # The log level of the event. # - # @see OpenAI::FineTuning::FineTuningJobEvent#level + # @see OpenAI::Models::FineTuning::FineTuningJobEvent#level module Level extend OpenAI::Internal::Type::Enum @@ -80,7 +80,7 @@ module Level # The type of event. 
# - # @see OpenAI::FineTuning::FineTuningJobEvent#type + # @see OpenAI::Models::FineTuning::FineTuningJobEvent#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index 07b43842..e9e7c30d 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -35,7 +35,7 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @!method initialize(project:, entity: nil, name: nil, tags: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJobWandbIntegration} for more details. + # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegration} for more details. # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 4b691bd6..54781dde 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -16,14 +16,15 @@ class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. 
# - # @return [OpenAI::FineTuning::FineTuningJobWandbIntegration] + # @return [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] required :wandb, -> { OpenAI::FineTuning::FineTuningJobWandbIntegration } # @!method initialize(wandb:, type: :wandb) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::FineTuningJobWandbIntegrationObject} for more details. + # {OpenAI::Models::FineTuning::FineTuningJobWandbIntegrationObject} for more + # details. # - # @param wandb [OpenAI::FineTuning::FineTuningJobWandbIntegration] The settings for your integration with Weights and Biases. This payload specifie + # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] The settings for your integration with Weights and Biases. This payload specifie # # @param type [Symbol, :wandb] The type of the integration being enabled for the fine-tuning job end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index 9b1d6016..193bc2d0 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -12,7 +12,7 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). # - # @return [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] + # @return [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] required :model, union: -> { OpenAI::FineTuning::JobCreateParams::Model } # @!attribute training_file @@ -43,13 +43,13 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. 
# - # @return [OpenAI::FineTuning::JobCreateParams::Hyperparameters, nil] + # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::JobCreateParams::Hyperparameters } # @!attribute integrations # A list of integrations to enable for your fine-tuning job. # - # @return [Array, nil] + # @return [Array, nil] optional :integrations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::FineTuning::JobCreateParams::Integration] }, nil?: true @@ -68,7 +68,7 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute method_ # The method used for fine-tuning. # - # @return [OpenAI::FineTuning::JobCreateParams::Method, nil] + # @return [OpenAI::Models::FineTuning::JobCreateParams::Method, nil] optional :method_, -> { OpenAI::FineTuning::JobCreateParams::Method }, api_name: :method # @!attribute seed @@ -110,17 +110,17 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::FineTuning::JobCreateParams} for more details. # - # @param model [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the # # @param training_file [String] The ID of an uploaded file that contains training data. # - # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. # - # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. 
# # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::FineTuning::JobCreateParams::Method] The method used for fine-tuning. + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. # # @param seed [Integer, nil] The seed controls the reproducibility of the job. Passing in the same seed and j # @@ -137,13 +137,13 @@ module Model variant String - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::BABBAGE_002 } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::BABBAGE_002 } - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::DAVINCI_002 } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::DAVINCI_002 } - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_3_5_TURBO } - variant const: -> { OpenAI::FineTuning::JobCreateParams::Model::GPT_4O_MINI } + variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI } # @!method self.variants # @return [Array(String, Symbol)] @@ -188,7 +188,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Hyperparameters} for more details. + # {OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. @@ -202,7 +202,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. 
# - # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#batch_size + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -212,16 +212,12 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -231,16 +227,12 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::JobCreateParams::Hyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -250,10 +242,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end @@ -271,18 +259,18 @@ class Integration < OpenAI::Internal::Type::BaseModel # explicit display name for your run, add tags to your run, and set a default # entity (team, username, etc) to be associated with your run. 
# - # @return [OpenAI::FineTuning::JobCreateParams::Integration::Wandb] + # @return [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] required :wandb, -> { OpenAI::FineTuning::JobCreateParams::Integration::Wandb } # @!method initialize(wandb:, type: :wandb) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Integration} for more details. + # {OpenAI::Models::FineTuning::JobCreateParams::Integration} for more details. # - # @param wandb [OpenAI::FineTuning::JobCreateParams::Integration::Wandb] The settings for your integration with Weights and Biases. This payload specifie + # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] The settings for your integration with Weights and Biases. This payload specifie # # @param type [Symbol, :wandb] The type of integration to enable. Currently, only "wandb" (Weights and Biases) - # @see OpenAI::FineTuning::JobCreateParams::Integration#wandb + # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb class Wandb < OpenAI::Internal::Type::BaseModel # @!attribute project # The name of the project that the new run will be created under. @@ -315,7 +303,8 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @!method initialize(project:, entity: nil, name: nil, tags: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::JobCreateParams::Integration::Wandb} for more details. + # {OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb} for more + # details. # # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an @@ -336,41 +325,41 @@ class Method < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. 
# - # @return [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] + # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] required :type, enum: -> { OpenAI::FineTuning::JobCreateParams::Method::Type } # @!attribute dpo # Configuration for the DPO fine-tuning method. # - # @return [OpenAI::FineTuning::DpoMethod, nil] + # @return [OpenAI::Models::FineTuning::DpoMethod, nil] optional :dpo, -> { OpenAI::FineTuning::DpoMethod } # @!attribute reinforcement # Configuration for the reinforcement fine-tuning method. # - # @return [OpenAI::FineTuning::ReinforcementMethod, nil] + # @return [OpenAI::Models::FineTuning::ReinforcementMethod, nil] optional :reinforcement, -> { OpenAI::FineTuning::ReinforcementMethod } # @!attribute supervised # Configuration for the supervised fine-tuning method. # - # @return [OpenAI::FineTuning::SupervisedMethod, nil] + # @return [OpenAI::Models::FineTuning::SupervisedMethod, nil] optional :supervised, -> { OpenAI::FineTuning::SupervisedMethod } # @!method initialize(type:, dpo: nil, reinforcement: nil, supervised: nil) # The method used for fine-tuning. # - # @param type [Symbol, OpenAI::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. + # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @param dpo [OpenAI::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. + # @param dpo [OpenAI::Models::FineTuning::DpoMethod] Configuration for the DPO fine-tuning method. # - # @param reinforcement [OpenAI::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. + # @param reinforcement [OpenAI::Models::FineTuning::ReinforcementMethod] Configuration for the reinforcement fine-tuning method. # - # @param supervised [OpenAI::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. 
+ # @param supervised [OpenAI::Models::FineTuning::SupervisedMethod] Configuration for the supervised fine-tuning method. # The type of method. Is either `supervised`, `dpo`, or `reinforcement`. # - # @see OpenAI::FineTuning::JobCreateParams::Method#type + # @see OpenAI::Models::FineTuning::JobCreateParams::Method#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index f0a8ff33..cb4e4a9b 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -33,7 +33,7 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @!attribute metrics # Metrics at the step number during the fine-tuning job. # - # @return [OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] + # @return [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] required :metrics, -> { OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics } # @!attribute object @@ -60,13 +60,13 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # # @param fine_tuning_job_id [String] The name of the fine-tuning job that this checkpoint was created from. # - # @param metrics [OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] Metrics at the step number during the fine-tuning job. + # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] Metrics at the step number during the fine-tuning job. # # @param step_number [Integer] The step number that the checkpoint was created at. # # @param object [Symbol, :"fine_tuning.job.checkpoint"] The object type, which is always "fine_tuning.job.checkpoint". 
- # @see OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint#metrics + # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel # @!attribute full_valid_loss # diff --git a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb index ee22496e..8484df8c 100644 --- a/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/reinforcement_hyperparameters.rb @@ -48,7 +48,7 @@ class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_effort # Level of reasoning effort. # - # @return [Symbol, OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::FineTuning::ReinforcementHyperparameters::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort @@ -56,7 +56,7 @@ class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel # @!method initialize(batch_size: nil, compute_multiplier: nil, eval_interval: nil, eval_samples: nil, learning_rate_multiplier: nil, n_epochs: nil, reasoning_effort: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::ReinforcementHyperparameters} for more details. + # {OpenAI::Models::FineTuning::ReinforcementHyperparameters} for more details. # # The hyperparameters used for the reinforcement fine-tuning job. # @@ -72,12 +72,12 @@ class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel # # @param n_epochs [Symbol, :auto, Integer] The number of epochs to train the model for. An epoch refers to one full cycle t # - # @param reasoning_effort [Symbol, OpenAI::FineTuning::ReinforcementHyperparameters::ReasoningEffort] Level of reasoning effort. 
+ # @param reasoning_effort [Symbol, OpenAI::Models::FineTuning::ReinforcementHyperparameters::ReasoningEffort] Level of reasoning effort. # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#batch_size + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -87,15 +87,11 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Multiplier on amount of compute used for exploring search space during training. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#compute_multiplier + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#compute_multiplier module ComputeMultiplier extend OpenAI::Internal::Type::Union @@ -105,15 +101,11 @@ module ComputeMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of training steps between evaluation runs. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#eval_interval + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#eval_interval module EvalInterval extend OpenAI::Internal::Type::Union @@ -123,15 +115,11 @@ module EvalInterval # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Number of evaluation samples to generate per training step. 
# - # @see OpenAI::FineTuning::ReinforcementHyperparameters#eval_samples + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#eval_samples module EvalSamples extend OpenAI::Internal::Type::Union @@ -141,16 +129,12 @@ module EvalSamples # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -160,16 +144,12 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # - # @see OpenAI::FineTuning::ReinforcementHyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -179,15 +159,11 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Level of reasoning effort. 
# - # @see OpenAI::FineTuning::ReinforcementHyperparameters#reasoning_effort + # @see OpenAI::Models::FineTuning::ReinforcementHyperparameters#reasoning_effort module ReasoningEffort extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/fine_tuning/reinforcement_method.rb b/lib/openai/models/fine_tuning/reinforcement_method.rb index cfe966c9..40395ee8 100644 --- a/lib/openai/models/fine_tuning/reinforcement_method.rb +++ b/lib/openai/models/fine_tuning/reinforcement_method.rb @@ -7,25 +7,25 @@ class ReinforcementMethod < OpenAI::Internal::Type::BaseModel # @!attribute grader # The grader used for the fine-tuning job. # - # @return [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] + # @return [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] required :grader, union: -> { OpenAI::FineTuning::ReinforcementMethod::Grader } # @!attribute hyperparameters # The hyperparameters used for the reinforcement fine-tuning job. # - # @return [OpenAI::FineTuning::ReinforcementHyperparameters, nil] + # @return [OpenAI::Models::FineTuning::ReinforcementHyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::ReinforcementHyperparameters } # @!method initialize(grader:, hyperparameters: nil) # Configuration for the reinforcement fine-tuning method. # - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. 
+ # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # - # @param hyperparameters [OpenAI::FineTuning::ReinforcementHyperparameters] The hyperparameters used for the reinforcement fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::ReinforcementHyperparameters] The hyperparameters used for the reinforcement fine-tuning job. # The grader used for the fine-tuning job. # - # @see OpenAI::FineTuning::ReinforcementMethod#grader + # @see OpenAI::Models::FineTuning::ReinforcementMethod#grader module Grader extend OpenAI::Internal::Type::Union @@ -45,19 +45,7 @@ module Grader variant -> { OpenAI::Graders::MultiGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) - end - end + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader)] end end end diff --git a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb index 2a907416..a7965756 100644 --- a/lib/openai/models/fine_tuning/supervised_hyperparameters.rb +++ b/lib/openai/models/fine_tuning/supervised_hyperparameters.rb @@ -28,7 +28,7 @@ class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel # @!method 
initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FineTuning::SupervisedHyperparameters} for more details. + # {OpenAI::Models::FineTuning::SupervisedHyperparameters} for more details. # # The hyperparameters used for the fine-tuning job. # @@ -41,7 +41,7 @@ class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # - # @see OpenAI::FineTuning::SupervisedHyperparameters#batch_size + # @see OpenAI::Models::FineTuning::SupervisedHyperparameters#batch_size module BatchSize extend OpenAI::Internal::Type::Union @@ -51,16 +51,12 @@ module BatchSize # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # - # @see OpenAI::FineTuning::SupervisedHyperparameters#learning_rate_multiplier + # @see OpenAI::Models::FineTuning::SupervisedHyperparameters#learning_rate_multiplier module LearningRateMultiplier extend OpenAI::Internal::Type::Union @@ -70,16 +66,12 @@ module LearningRateMultiplier # @!method self.variants # @return [Array(Symbol, :auto, Float)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Float) } - end end # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. 
# - # @see OpenAI::FineTuning::SupervisedHyperparameters#n_epochs + # @see OpenAI::Models::FineTuning::SupervisedHyperparameters#n_epochs module NEpochs extend OpenAI::Internal::Type::Union @@ -89,10 +81,6 @@ module NEpochs # @!method self.variants # @return [Array(Symbol, :auto, Integer)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(Symbol, Integer) } - end end end end diff --git a/lib/openai/models/fine_tuning/supervised_method.rb b/lib/openai/models/fine_tuning/supervised_method.rb index f45655a0..f81f3648 100644 --- a/lib/openai/models/fine_tuning/supervised_method.rb +++ b/lib/openai/models/fine_tuning/supervised_method.rb @@ -7,13 +7,13 @@ class SupervisedMethod < OpenAI::Internal::Type::BaseModel # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # - # @return [OpenAI::FineTuning::SupervisedHyperparameters, nil] + # @return [OpenAI::Models::FineTuning::SupervisedHyperparameters, nil] optional :hyperparameters, -> { OpenAI::FineTuning::SupervisedHyperparameters } # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # - # @param hyperparameters [OpenAI::FineTuning::SupervisedHyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::SupervisedHyperparameters] The hyperparameters used for the fine-tuning job. end end end diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index 10fe7935..14e2bc4c 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -41,7 +41,7 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, parameters: nil, strict: nil) # Some parameter documentations has been truncated, see - # {OpenAI::FunctionDefinition} for more details. + # {OpenAI::Models::FunctionDefinition} for more details. 
# # @param name [String] The name of the function to be called. Must be a-z, A-Z, 0-9, or contain undersc # diff --git a/lib/openai/models/graders/label_model_grader.rb b/lib/openai/models/graders/label_model_grader.rb index b6fa6510..0f0d52aa 100644 --- a/lib/openai/models/graders/label_model_grader.rb +++ b/lib/openai/models/graders/label_model_grader.rb @@ -6,8 +6,8 @@ module Graders class LabelModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalItem] } + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::LabelModelGrader::Input] } # @!attribute labels # The labels to assign to each item in the evaluation. @@ -43,7 +43,7 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel # A LabelModelGrader object which uses a model to assign labels to each item in # the evaluation. # - # @param input [Array] + # @param input [Array] # # @param labels [Array] The labels to assign to each item in the evaluation. # @@ -54,6 +54,115 @@ class LabelModelGrader < OpenAI::Internal::Type::BaseModel # @param passing_labels [Array] The labels that indicate a passing result. Must be a subset of labels. # # @param type [Symbol, :label_model] The object type, which is always `label_model`. + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText] + required :content, union: -> { OpenAI::Graders::LabelModelGrader::Input::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ # + # @return [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Role] + required :role, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Role } + + # @!attribute type + # The type of the message input. Always `message`. + # + # @return [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Type, nil] + optional :type, enum: -> { OpenAI::Graders::LabelModelGrader::Input::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Graders::LabelModelGrader::Input} for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Graders::LabelModelGrader::Input::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Graders::LabelModelGrader::Input#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Graders::LabelModelGrader::Input::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. 
+ # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. + # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText} for more + # details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::LabelModelGrader::Input::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Graders::LabelModelGrader::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. 
+ # + # @see OpenAI::Models::Graders::LabelModelGrader::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end end end diff --git a/lib/openai/models/graders/multi_grader.rb b/lib/openai/models/graders/multi_grader.rb index 520ef0e2..de0d1240 100644 --- a/lib/openai/models/graders/multi_grader.rb +++ b/lib/openai/models/graders/multi_grader.rb @@ -12,7 +12,7 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel # @!attribute graders # - # @return [Hash{Symbol=>OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader}] + # @return [Hash{Symbol=>OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader}] required :graders, -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Graders::MultiGrader::Grader] } # @!attribute name @@ -22,7 +22,7 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel required :name, String # @!attribute type - # The type of grader. + # The object type, which is always `multi`. # # @return [Symbol, :multi] required :type, const: :multi @@ -33,11 +33,11 @@ class MultiGrader < OpenAI::Internal::Type::BaseModel # # @param calculate_output [String] A formula to calculate the output based on grader results. 
# - # @param graders [Hash{Symbol=>OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader}] + # @param graders [Hash{Symbol=>OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader}] # # @param name [String] The name of the grader. # - # @param type [Symbol, :multi] The type of grader. + # @param type [Symbol, :multi] The object type, which is always `multi`. # A StringCheckGrader object that performs a string comparison between input and # reference using a specified operation. @@ -61,19 +61,7 @@ module Grader variant -> { OpenAI::Graders::LabelModelGrader } # @!method self.variants - # @return [Array(OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::LabelModelGrader)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::LabelModelGrader - ) - end - end + # @return [Array(OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::LabelModelGrader)] end end end diff --git a/lib/openai/models/graders/score_model_grader.rb b/lib/openai/models/graders/score_model_grader.rb index 353d7ae9..bdec7e50 100644 --- a/lib/openai/models/graders/score_model_grader.rb +++ b/lib/openai/models/graders/score_model_grader.rb @@ -7,8 +7,8 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!attribute input # The input text. This may include template strings. 
# - # @return [Array] - required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::EvalItem] } + # @return [Array] + required :input, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Graders::ScoreModelGrader::Input] } # @!attribute model # The model to use for the evaluation. @@ -43,7 +43,7 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @!method initialize(input:, model:, name:, range: nil, sampling_params: nil, type: :score_model) # A ScoreModelGrader object that uses a model to assign a score to the input. # - # @param input [Array] The input text. This may include template strings. + # @param input [Array] The input text. This may include template strings. # # @param model [String] The model to use for the evaluation. # @@ -54,6 +54,115 @@ class ScoreModelGrader < OpenAI::Internal::Type::BaseModel # @param sampling_params [Object] The sampling parameters for the model. # # @param type [Symbol, :score_model] The object type, which is always `score_model`. + + class Input < OpenAI::Internal::Type::BaseModel + # @!attribute content + # Text inputs to the model - can contain template strings. + # + # @return [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText] + required :content, union: -> { OpenAI::Graders::ScoreModelGrader::Input::Content } + + # @!attribute role + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @return [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Role] + required :role, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Role } + + # @!attribute type + # The type of the message input. Always `message`. 
+ # + # @return [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Type, nil] + optional :type, enum: -> { OpenAI::Graders::ScoreModelGrader::Input::Type } + + # @!method initialize(content:, role:, type: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Graders::ScoreModelGrader::Input} for more details. + # + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText] Text inputs to the model - can contain template strings. + # + # @param role [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # + # @param type [Symbol, OpenAI::Models::Graders::ScoreModelGrader::Input::Type] The type of the message input. Always `message`. + + # Text inputs to the model - can contain template strings. + # + # @see OpenAI::Models::Graders::ScoreModelGrader::Input#content + module Content + extend OpenAI::Internal::Type::Union + + # A text input to the model. + variant String + + # A text input to the model. + variant -> { OpenAI::Responses::ResponseInputText } + + # A text output from the model. + variant -> { OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText } + + class OutputText < OpenAI::Internal::Type::BaseModel + # @!attribute text + # The text output from the model. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the output text. Always `output_text`. 
+ # + # @return [Symbol, :output_text] + required :type, const: :output_text + + # @!method initialize(text:, type: :output_text) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText} for more + # details. + # + # A text output from the model. + # + # @param text [String] The text output from the model. + # + # @param type [Symbol, :output_text] The type of the output text. Always `output_text`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Graders::ScoreModelGrader::Input::Content::OutputText)] + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + # + # @see OpenAI::Models::Graders::ScoreModelGrader::Input#role + module Role + extend OpenAI::Internal::Type::Enum + + USER = :user + ASSISTANT = :assistant + SYSTEM = :system + DEVELOPER = :developer + + # @!method self.values + # @return [Array] + end + + # The type of the message input. Always `message`. + # + # @see OpenAI::Models::Graders::ScoreModelGrader::Input#type + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE = :message + + # @!method self.values + # @return [Array] + end + end end end diff --git a/lib/openai/models/graders/string_check_grader.rb b/lib/openai/models/graders/string_check_grader.rb index 90aeb689..2d8f5a30 100644 --- a/lib/openai/models/graders/string_check_grader.rb +++ b/lib/openai/models/graders/string_check_grader.rb @@ -19,7 +19,7 @@ class StringCheckGrader < OpenAI::Internal::Type::BaseModel # @!attribute operation # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. 
# - # @return [Symbol, OpenAI::Graders::StringCheckGrader::Operation] + # @return [Symbol, OpenAI::Models::Graders::StringCheckGrader::Operation] required :operation, enum: -> { OpenAI::Graders::StringCheckGrader::Operation } # @!attribute reference @@ -42,7 +42,7 @@ class StringCheckGrader < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the grader. # - # @param operation [Symbol, OpenAI::Graders::StringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. + # @param operation [Symbol, OpenAI::Models::Graders::StringCheckGrader::Operation] The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # # @param reference [String] The reference text. This may include template strings. # @@ -50,7 +50,7 @@ class StringCheckGrader < OpenAI::Internal::Type::BaseModel # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # - # @see OpenAI::Graders::StringCheckGrader#operation + # @see OpenAI::Models::Graders::StringCheckGrader#operation module Operation extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/graders/text_similarity_grader.rb b/lib/openai/models/graders/text_similarity_grader.rb index 30b7eaad..a64e3314 100644 --- a/lib/openai/models/graders/text_similarity_grader.rb +++ b/lib/openai/models/graders/text_similarity_grader.rb @@ -8,7 +8,7 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. 
# - # @return [Symbol, OpenAI::Graders::TextSimilarityGrader::EvaluationMetric] + # @return [Symbol, OpenAI::Models::Graders::TextSimilarityGrader::EvaluationMetric] required :evaluation_metric, enum: -> { OpenAI::Graders::TextSimilarityGrader::EvaluationMetric } # @!attribute input @@ -37,11 +37,11 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @!method initialize(evaluation_metric:, input:, name:, reference:, type: :text_similarity) # Some parameter documentations has been truncated, see - # {OpenAI::Graders::TextSimilarityGrader} for more details. + # {OpenAI::Models::Graders::TextSimilarityGrader} for more details. # # A TextSimilarityGrader object which grades text based on similarity metrics. # - # @param evaluation_metric [Symbol, OpenAI::Graders::TextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r + # @param evaluation_metric [Symbol, OpenAI::Models::Graders::TextSimilarityGrader::EvaluationMetric] The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, `r # # @param input [String] The text being graded. # @@ -54,7 +54,7 @@ class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel # The evaluation metric to use. One of `fuzzy_match`, `bleu`, `gleu`, `meteor`, # `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. # - # @see OpenAI::Graders::TextSimilarityGrader#evaluation_metric + # @see OpenAI::Models::Graders::TextSimilarityGrader#evaluation_metric module EvaluationMetric extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index e10a18f2..bedd8bcf 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -26,8 +26,8 @@ class Image < OpenAI::Internal::Type::BaseModel optional :url, String # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) - # Some parameter documentations has been truncated, see {OpenAI::Image} for more - # details. 
+ # Some parameter documentations has been truncated, see {OpenAI::Models::Image} + # for more details. # # Represents the content or the URL of an image generated by the OpenAI API. # diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index f83ea80a..29559ca4 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -11,14 +11,14 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :image, OpenAI::Internal::Type::FileInput # @!attribute model # The model to use for image generation. Only `dall-e-2` is supported at this # time. # - # @return [String, Symbol, OpenAI::ImageModel, nil] + # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::ImageCreateVariationParams::Model }, nil?: true # @!attribute n @@ -32,14 +32,14 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # `b64_json`. URLs are only valid for 60 minutes after the image has been # generated. # - # @return [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::ImageCreateVariationParams::ResponseFormat }, nil?: true # @!attribute size # The size of the generated images. Must be one of `256x256`, `512x512`, or # `1024x1024`. 
# - # @return [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] + # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] optional :size, enum: -> { OpenAI::ImageCreateVariationParams::Size }, nil?: true # @!attribute user @@ -54,15 +54,15 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageCreateVariationParams} for more details. # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param response_format [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. 
Must be one of `256x256`, `512x512`, or `1024x # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -79,11 +79,7 @@ module Model variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ImageModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The format in which the generated images are returned. Must be one of `url` or diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index aec90bd0..ea3225f3 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -16,7 +16,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart, Array] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] required :image, union: -> { OpenAI::ImageEditParams::Image } # @!attribute prompt @@ -35,7 +35,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. # - # @return [Symbol, OpenAI::ImageEditParams::Background, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::Background, nil] optional :background, enum: -> { OpenAI::ImageEditParams::Background }, nil?: true # @!attribute mask @@ -44,7 +44,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # the mask will be applied on the first image. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. 
# - # @return [Pathname, StringIO, IO, OpenAI::FilePart, nil] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart, nil] optional :mask, OpenAI::Internal::Type::FileInput # @!attribute model @@ -52,7 +52,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # supported. Defaults to `dall-e-2` unless a parameter specific to `gpt-image-1` # is used. # - # @return [String, Symbol, OpenAI::ImageModel, nil] + # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::ImageEditParams::Model }, nil?: true # @!attribute n @@ -66,7 +66,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # only supported for `gpt-image-1`. `dall-e-2` only supports `standard` quality. # Defaults to `auto`. # - # @return [Symbol, OpenAI::ImageEditParams::Quality, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] optional :quality, enum: -> { OpenAI::ImageEditParams::Quality }, nil?: true # @!attribute response_format @@ -75,7 +75,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # generated. This parameter is only supported for `dall-e-2`, as `gpt-image-1` # will always return base64-encoded images. # - # @return [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::ImageEditParams::ResponseFormat }, nil?: true # @!attribute size @@ -83,7 +83,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # (landscape), `1024x1536` (portrait), or `auto` (default value) for # `gpt-image-1`, and one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`. 
# - # @return [Symbol, OpenAI::ImageEditParams::Size, nil] + # @return [Symbol, OpenAI::Models::ImageEditParams::Size, nil] optional :size, enum: -> { OpenAI::ImageEditParams::Size }, nil?: true # @!attribute user @@ -98,23 +98,23 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::ImageEditParams} for more details. # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character # - # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param quality [Symbol, OpenAI::ImageEditParams::Quality, nil] The quality of the image that will be generated. 
`high`, `medium` and `low` are + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # - # @param response_format [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -132,15 +132,11 @@ module Image variant OpenAI::Internal::Type::FileInput - variant -> { OpenAI::ImageEditParams::Image::StringArray } + variant -> { OpenAI::Models::ImageEditParams::Image::StringArray } # @!method self.variants # @return [Array(StringIO, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(StringIO, T::Array[StringIO]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::FileInput] end @@ -175,11 +171,7 @@ module Model variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ImageModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The quality of the image that will be generated. 
`high`, `medium` and `low` are diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index a3aca3cb..b0b47d41 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -24,7 +24,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # If `transparent`, the output format needs to support transparency, so it should # be set to either `png` (default value) or `webp`. # - # @return [Symbol, OpenAI::ImageGenerateParams::Background, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] optional :background, enum: -> { OpenAI::ImageGenerateParams::Background }, nil?: true # @!attribute model @@ -32,14 +32,14 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # `gpt-image-1`. Defaults to `dall-e-2` unless a parameter specific to # `gpt-image-1` is used. # - # @return [String, Symbol, OpenAI::ImageModel, nil] + # @return [String, Symbol, OpenAI::Models::ImageModel, nil] optional :model, union: -> { OpenAI::ImageGenerateParams::Model }, nil?: true # @!attribute moderation # Control the content-moderation level for images generated by `gpt-image-1`. Must # be either `low` for less restrictive filtering or `auto` (default value). # - # @return [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] optional :moderation, enum: -> { OpenAI::ImageGenerateParams::Moderation }, nil?: true # @!attribute n @@ -61,7 +61,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # The format in which the generated images are returned. This parameter is only # supported for `gpt-image-1`. Must be one of `png`, `jpeg`, or `webp`. 
# - # @return [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] optional :output_format, enum: -> { OpenAI::ImageGenerateParams::OutputFormat }, nil?: true # @!attribute quality @@ -73,7 +73,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # - `hd` and `standard` are supported for `dall-e-3`. # - `standard` is the only option for `dall-e-2`. # - # @return [Symbol, OpenAI::ImageGenerateParams::Quality, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] optional :quality, enum: -> { OpenAI::ImageGenerateParams::Quality }, nil?: true # @!attribute response_format @@ -82,7 +82,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # after the image has been generated. This parameter isn't supported for # `gpt-image-1` which will always return base64-encoded images. # - # @return [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::ImageGenerateParams::ResponseFormat }, nil?: true # @!attribute size @@ -91,7 +91,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # `gpt-image-1`, one of `256x256`, `512x512`, or `1024x1024` for `dall-e-2`, and # one of `1024x1024`, `1792x1024`, or `1024x1792` for `dall-e-3`. # - # @return [Symbol, OpenAI::ImageGenerateParams::Size, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] optional :size, enum: -> { OpenAI::ImageGenerateParams::Size }, nil?: true # @!attribute style @@ -100,7 +100,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # towards generating hyper-real and dramatic images. Natural causes the model to # produce more natural, less hyper-real looking images. 
# - # @return [Symbol, OpenAI::ImageGenerateParams::Style, nil] + # @return [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] optional :style, enum: -> { OpenAI::ImageGenerateParams::Style }, nil?: true # @!attribute user @@ -117,25 +117,25 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte # - # @param background [Symbol, OpenAI::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im # - # @param moderation [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only # - # @param output_format [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. 
This parameter is only su # - # @param quality [Symbol, OpenAI::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # - # @param response_format [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned # - # @param size [Symbol, OpenAI::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # - # @param style [Symbol, OpenAI::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # @@ -171,11 +171,7 @@ module Model variant enum: -> { OpenAI::ImageModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ImageModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ImageModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # Control the content-moderation level for images generated by `gpt-image-1`. 
Must diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb index df579e91..ecd33bc9 100644 --- a/lib/openai/models/images_response.rb +++ b/lib/openai/models/images_response.rb @@ -13,28 +13,28 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel # @!attribute data # The list of generated images. # - # @return [Array, nil] + # @return [Array, nil] optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Image] } # @!attribute usage # For `gpt-image-1` only, the token usage information for the image generation. # - # @return [OpenAI::ImagesResponse::Usage, nil] + # @return [OpenAI::Models::ImagesResponse::Usage, nil] optional :usage, -> { OpenAI::ImagesResponse::Usage } # @!method initialize(created:, data: nil, usage: nil) - # Some parameter documentations has been truncated, see {OpenAI::ImagesResponse} - # for more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ImagesResponse} for more details. # # The response from the image generation endpoint. # # @param created [Integer] The Unix timestamp (in seconds) of when the image was created. # - # @param data [Array] The list of generated images. + # @param data [Array] The list of generated images. # - # @param usage [OpenAI::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. + # @param usage [OpenAI::Models::ImagesResponse::Usage] For `gpt-image-1` only, the token usage information for the image generation. - # @see OpenAI::ImagesResponse#usage + # @see OpenAI::Models::ImagesResponse#usage class Usage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens # The number of tokens (images and text) in the input prompt. @@ -45,7 +45,7 @@ class Usage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens_details # The input tokens detailed information for the image generation. 
# - # @return [OpenAI::ImagesResponse::Usage::InputTokensDetails] + # @return [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] required :input_tokens_details, -> { OpenAI::ImagesResponse::Usage::InputTokensDetails } # @!attribute output_tokens @@ -65,13 +65,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # # @param input_tokens [Integer] The number of tokens (images and text) in the input prompt. # - # @param input_tokens_details [OpenAI::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. + # @param input_tokens_details [OpenAI::Models::ImagesResponse::Usage::InputTokensDetails] The input tokens detailed information for the image generation. # # @param output_tokens [Integer] The number of image tokens in the output image. # # @param total_tokens [Integer] The total number of tokens (images and text) used for the image generation. - # @see OpenAI::ImagesResponse::Usage#input_tokens_details + # @see OpenAI::Models::ImagesResponse::Usage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute image_tokens # The number of image tokens in the input prompt. diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 39fc2d41..82a8c595 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -6,19 +6,19 @@ class Moderation < OpenAI::Internal::Type::BaseModel # @!attribute categories # A list of the categories, and whether they are flagged or not. # - # @return [OpenAI::Moderation::Categories] + # @return [OpenAI::Models::Moderation::Categories] required :categories, -> { OpenAI::Moderation::Categories } # @!attribute category_applied_input_types # A list of the categories along with the input type(s) that the score applies to. 
# - # @return [OpenAI::Moderation::CategoryAppliedInputTypes] + # @return [OpenAI::Models::Moderation::CategoryAppliedInputTypes] required :category_applied_input_types, -> { OpenAI::Moderation::CategoryAppliedInputTypes } # @!attribute category_scores # A list of the categories along with their scores as predicted by model. # - # @return [OpenAI::Moderation::CategoryScores] + # @return [OpenAI::Models::Moderation::CategoryScores] required :category_scores, -> { OpenAI::Moderation::CategoryScores } # @!attribute flagged @@ -28,18 +28,18 @@ class Moderation < OpenAI::Internal::Type::BaseModel required :flagged, OpenAI::Internal::Type::Boolean # @!method initialize(categories:, category_applied_input_types:, category_scores:, flagged:) - # Some parameter documentations has been truncated, see {OpenAI::Moderation} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Moderation} for more details. # - # @param categories [OpenAI::Moderation::Categories] A list of the categories, and whether they are flagged or not. + # @param categories [OpenAI::Models::Moderation::Categories] A list of the categories, and whether they are flagged or not. # - # @param category_applied_input_types [OpenAI::Moderation::CategoryAppliedInputTypes] A list of the categories along with the input type(s) that the score applies to. + # @param category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] A list of the categories along with the input type(s) that the score applies to. # - # @param category_scores [OpenAI::Moderation::CategoryScores] A list of the categories along with their scores as predicted by model. + # @param category_scores [OpenAI::Models::Moderation::CategoryScores] A list of the categories along with their scores as predicted by model. # # @param flagged [Boolean] Whether any of the below categories are flagged. 
- # @see OpenAI::Moderation#categories + # @see OpenAI::Models::Moderation#categories class Categories < OpenAI::Internal::Type::BaseModel # @!attribute harassment # Content that expresses, incites, or promotes harassing language towards any @@ -138,7 +138,7 @@ class Categories < OpenAI::Internal::Type::BaseModel # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # Some parameter documentations has been truncated, see - # {OpenAI::Moderation::Categories} for more details. + # {OpenAI::Models::Moderation::Categories} for more details. # # A list of the categories, and whether they are flagged or not. # @@ -169,12 +169,12 @@ class Categories < OpenAI::Internal::Type::BaseModel # @param violence_graphic [Boolean] Content that depicts death, violence, or physical injury in graphic detail. end - # @see OpenAI::Moderation#category_applied_input_types + # @see OpenAI::Models::Moderation#category_applied_input_types class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The applied input type(s) for the category 'harassment'. # - # @return [Array] + # @return [Array] required :harassment, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Harassment] @@ -183,7 +183,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute harassment_threatening # The applied input type(s) for the category 'harassment/threatening'. # - # @return [Array] + # @return [Array] required :harassment_threatening, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::HarassmentThreatening] @@ -193,7 +193,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute hate # The applied input type(s) for the category 'hate'. 
# - # @return [Array] + # @return [Array] required :hate, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Hate] @@ -202,7 +202,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute hate_threatening # The applied input type(s) for the category 'hate/threatening'. # - # @return [Array] + # @return [Array] required :hate_threatening, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::HateThreatening] @@ -212,7 +212,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute illicit # The applied input type(s) for the category 'illicit'. # - # @return [Array] + # @return [Array] required :illicit, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Illicit] @@ -221,7 +221,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute illicit_violent # The applied input type(s) for the category 'illicit/violent'. # - # @return [Array] + # @return [Array] required :illicit_violent, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::IllicitViolent] @@ -231,7 +231,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute self_harm # The applied input type(s) for the category 'self-harm'. # - # @return [Array] + # @return [Array] required :self_harm, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarm] @@ -241,7 +241,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute self_harm_instructions # The applied input type(s) for the category 'self-harm/instructions'. 
# - # @return [Array] + # @return [Array] required :self_harm_instructions, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction] @@ -251,7 +251,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute self_harm_intent # The applied input type(s) for the category 'self-harm/intent'. # - # @return [Array] + # @return [Array] required :self_harm_intent, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SelfHarmIntent] @@ -261,7 +261,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute sexual # The applied input type(s) for the category 'sexual'. # - # @return [Array] + # @return [Array] required :sexual, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Sexual] @@ -270,7 +270,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute sexual_minors # The applied input type(s) for the category 'sexual/minors'. # - # @return [Array] + # @return [Array] required :sexual_minors, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::SexualMinor] @@ -280,7 +280,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute violence # The applied input type(s) for the category 'violence'. # - # @return [Array] + # @return [Array] required :violence, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::Violence] @@ -289,7 +289,7 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!attribute violence_graphic # The applied input type(s) for the category 'violence/graphic'. 
# - # @return [Array] + # @return [Array] required :violence_graphic, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] @@ -299,31 +299,31 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) # A list of the categories along with the input type(s) that the score applies to. # - # @param harassment [Array] The applied input type(s) for the category 'harassment'. + # @param harassment [Array] The applied input type(s) for the category 'harassment'. # - # @param harassment_threatening [Array] The applied input type(s) for the category 'harassment/threatening'. + # @param harassment_threatening [Array] The applied input type(s) for the category 'harassment/threatening'. # - # @param hate [Array] The applied input type(s) for the category 'hate'. + # @param hate [Array] The applied input type(s) for the category 'hate'. # - # @param hate_threatening [Array] The applied input type(s) for the category 'hate/threatening'. + # @param hate_threatening [Array] The applied input type(s) for the category 'hate/threatening'. # - # @param illicit [Array] The applied input type(s) for the category 'illicit'. + # @param illicit [Array] The applied input type(s) for the category 'illicit'. # - # @param illicit_violent [Array] The applied input type(s) for the category 'illicit/violent'. + # @param illicit_violent [Array] The applied input type(s) for the category 'illicit/violent'. # - # @param self_harm [Array] The applied input type(s) for the category 'self-harm'. + # @param self_harm [Array] The applied input type(s) for the category 'self-harm'. # - # @param self_harm_instructions [Array] The applied input type(s) for the category 'self-harm/instructions'. 
+ # @param self_harm_instructions [Array] The applied input type(s) for the category 'self-harm/instructions'. # - # @param self_harm_intent [Array] The applied input type(s) for the category 'self-harm/intent'. + # @param self_harm_intent [Array] The applied input type(s) for the category 'self-harm/intent'. # - # @param sexual [Array] The applied input type(s) for the category 'sexual'. + # @param sexual [Array] The applied input type(s) for the category 'sexual'. # - # @param sexual_minors [Array] The applied input type(s) for the category 'sexual/minors'. + # @param sexual_minors [Array] The applied input type(s) for the category 'sexual/minors'. # - # @param violence [Array] The applied input type(s) for the category 'violence'. + # @param violence [Array] The applied input type(s) for the category 'violence'. # - # @param violence_graphic [Array] The applied input type(s) for the category 'violence/graphic'. + # @param violence_graphic [Array] The applied input type(s) for the category 'violence/graphic'. module Harassment extend OpenAI::Internal::Type::Enum @@ -449,7 +449,7 @@ module ViolenceGraphic end end - # @see OpenAI::Moderation#category_scores + # @see OpenAI::Models::Moderation#category_scores class CategoryScores < OpenAI::Internal::Type::BaseModel # @!attribute harassment # The score for the category 'harassment'. diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 72f3a355..008c8b28 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -11,7 +11,7 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. 
# - # @return [String, Array, Array] + # @return [String, Array, Array] required :input, union: -> { OpenAI::ModerationCreateParams::Input } # @!attribute model @@ -20,16 +20,16 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # learn about available models # [here](https://platform.openai.com/docs/models#moderation). # - # @return [String, Symbol, OpenAI::ModerationModel, nil] + # @return [String, Symbol, OpenAI::Models::ModerationModel, nil] optional :model, union: -> { OpenAI::ModerationCreateParams::Model } # @!method initialize(input:, model: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::ModerationCreateParams} for more details. # - # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or + # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or # - # @param model [String, Symbol, OpenAI::ModerationModel] The content moderation model you would like to use. Learn more in + # @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -42,23 +42,13 @@ module Input variant String # An array of strings to classify for moderation. - variant -> { OpenAI::ModerationCreateParams::Input::StringArray } + variant -> { OpenAI::Models::ModerationCreateParams::Input::StringArray } # An array of multi-modal inputs to the moderation model. 
- variant -> { OpenAI::ModerationCreateParams::Input::ModerationMultiModalInputArray } + variant -> { OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray } # @!method self.variants - # @return [Array(String, Array, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[String], - T::Array[T.any(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] - ) - end - end + # @return [Array(String, Array, Array)] # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -83,11 +73,7 @@ module Model variant enum: -> { OpenAI::ModerationModel } # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ModerationModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, OpenAI::ModerationModel::TaggedSymbol) } - end + # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] end end end diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb index 17c60d91..0085e8a8 100644 --- a/lib/openai/models/moderation_create_response.rb +++ b/lib/openai/models/moderation_create_response.rb @@ -19,7 +19,7 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # @!attribute results # A list of moderation objects. # - # @return [Array] + # @return [Array] required :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Moderation] } # @!method initialize(id:, model:, results:) @@ -29,7 +29,7 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # # @param model [String] The model used to generate the moderation results. # - # @param results [Array] A list of moderation objects. + # @param results [Array] A list of moderation objects. 
end end end diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index ed95c5b8..dca658e1 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -6,7 +6,7 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!attribute image_url # Contains either an image URL or a data URL for a base64 encoded image. # - # @return [OpenAI::ModerationImageURLInput::ImageURL] + # @return [OpenAI::Models::ModerationImageURLInput::ImageURL] required :image_url, -> { OpenAI::ModerationImageURLInput::ImageURL } # @!attribute type @@ -18,11 +18,11 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @!method initialize(image_url:, type: :image_url) # An object describing an image to classify. # - # @param image_url [OpenAI::ModerationImageURLInput::ImageURL] Contains either an image URL or a data URL for a base64 encoded image. + # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] Contains either an image URL or a data URL for a base64 encoded image. # # @param type [Symbol, :image_url] Always `image_url`. - # @see OpenAI::ModerationImageURLInput#image_url + # @see OpenAI::Models::ModerationImageURLInput#image_url class ImageURL < OpenAI::Internal::Type::BaseModel # @!attribute url # Either a URL of the image or the base64 encoded image data. 
diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index ef09466b..8b89503d 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -15,11 +15,7 @@ module ModerationMultiModalInput variant :text, -> { OpenAI::ModerationTextInput } # @!method self.variants - # @return [Array(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ModerationImageURLInput, OpenAI::ModerationTextInput) } - end + # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index 9c418c18..2cdebe06 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -11,7 +11,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # supported values are `low`, `medium`, and `high`. Reducing reasoning effort can # result in faster responses and fewer tokens used on reasoning in a response. # - # @return [Symbol, OpenAI::ReasoningEffort, nil] + # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :effort, enum: -> { OpenAI::ReasoningEffort }, nil?: true # @!attribute generate_summary @@ -23,7 +23,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @return [Symbol, OpenAI::Reasoning::GenerateSummary, nil] + # @return [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] optional :generate_summary, enum: -> { OpenAI::Reasoning::GenerateSummary }, nil?: true # @!attribute summary @@ -31,23 +31,23 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. 
# - # @return [Symbol, OpenAI::Reasoning::Summary, nil] + # @return [Symbol, OpenAI::Models::Reasoning::Summary, nil] optional :summary, enum: -> { OpenAI::Reasoning::Summary }, nil?: true # @!method initialize(effort: nil, generate_summary: nil, summary: nil) - # Some parameter documentations has been truncated, see {OpenAI::Reasoning} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Reasoning} for more details. # # **o-series models only** # # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @param effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param generate_summary [Symbol, OpenAI::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. + # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] **Deprecated:** use `summary` instead. # - # @param summary [Symbol, OpenAI::Reasoning::Summary, nil] A summary of the reasoning performed by the model. This can be + # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] A summary of the reasoning performed by the model. This can be # @deprecated # @@ -57,7 +57,7 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. # - # @see OpenAI::Reasoning#generate_summary + # @see OpenAI::Models::Reasoning#generate_summary module GenerateSummary extend OpenAI::Internal::Type::Enum @@ -73,7 +73,7 @@ module GenerateSummary # debugging and understanding the model's reasoning process. One of `auto`, # `concise`, or `detailed`. 
# - # @see OpenAI::Reasoning#summary + # @see OpenAI::Models::Reasoning#summary module Summary extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 294afc55..e84b2194 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -6,7 +6,7 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute json_schema # Structured Outputs configuration options, including a JSON Schema. # - # @return [OpenAI::ResponseFormatJSONSchema::JSONSchema] + # @return [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] required :json_schema, -> { OpenAI::ResponseFormatJSONSchema::JSONSchema } # @!attribute type @@ -17,17 +17,17 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @!method initialize(json_schema:, type: :json_schema) # Some parameter documentations has been truncated, see - # {OpenAI::ResponseFormatJSONSchema} for more details. + # {OpenAI::Models::ResponseFormatJSONSchema} for more details. # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). # - # @param json_schema [OpenAI::ResponseFormatJSONSchema::JSONSchema] Structured Outputs configuration options, including a JSON Schema. + # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] Structured Outputs configuration options, including a JSON Schema. # # @param type [Symbol, :json_schema] The type of response format being defined. Always `json_schema`. - # @see OpenAI::ResponseFormatJSONSchema#json_schema + # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema class JSONSchema < OpenAI::Internal::Type::BaseModel # @!attribute name # The name of the response format. 
Must be a-z, A-Z, 0-9, or contain underscores @@ -48,7 +48,13 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # to build JSON schemas [here](https://json-schema.org/). # # @return [Hash{Symbol=>Object}, nil] - optional :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] + optional :schema, + union: -> { + OpenAI::StructuredOutput::UnionOf[ + OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown], + OpenAI::StructuredOutput::JsonSchemaConverter + ] + } # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to @@ -62,7 +68,7 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, description: nil, schema: nil, strict: nil) # Some parameter documentations has been truncated, see - # {OpenAI::ResponseFormatJSONSchema::JSONSchema} for more details. + # {OpenAI::Models::ResponseFormatJSONSchema::JSONSchema} for more details. # # Structured Outputs configuration options, including a JSON Schema. # @@ -70,7 +76,7 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # # @param description [String] A description of what the response format is for, used by the model to # - # @param schema [Hash{Symbol=>Object}] The schema for the response format, described as a JSON Schema object. + # @param schema [Hash{Symbol=>Object}, OpenAI::StructuredOutput::JsonSchemaConverter] The schema for the response format, described as a JSON Schema object. # # @param strict [Boolean, nil] Whether to enable strict schema adherence when generating the output. end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index e6b14978..b883865c 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -19,7 +19,7 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # @!attribute environment # The type of computer environment to control. 
# - # @return [Symbol, OpenAI::Responses::ComputerTool::Environment] + # @return [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] required :environment, enum: -> { OpenAI::Responses::ComputerTool::Environment } # @!attribute type @@ -36,13 +36,13 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # # @param display_width [Integer] The width of the computer display. # - # @param environment [Symbol, OpenAI::Responses::ComputerTool::Environment] The type of computer environment to control. + # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] The type of computer environment to control. # # @param type [Symbol, :computer_use_preview] The type of the computer use tool. Always `computer_use_preview`. # The type of computer environment to control. # - # @see OpenAI::Responses::ComputerTool#environment + # @see OpenAI::Models::Responses::ComputerTool#environment module Environment extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index ecf0c374..64f37584 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -8,25 +8,25 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. # - # @return [String, Array] + # @return [String, Array] required :content, union: -> { OpenAI::Responses::EasyInputMessage::Content } # @!attribute role # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @return [Symbol, OpenAI::Responses::EasyInputMessage::Role] + # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] required :role, enum: -> { OpenAI::Responses::EasyInputMessage::Role } # @!attribute type # The type of the message input. Always `message`. 
# - # @return [Symbol, OpenAI::Responses::EasyInputMessage::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type, nil] optional :type, enum: -> { OpenAI::Responses::EasyInputMessage::Type } # @!method initialize(content:, role:, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::EasyInputMessage} for more details. + # {OpenAI::Models::Responses::EasyInputMessage} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -34,16 +34,16 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # `assistant` role are presumed to have been generated by the model in previous # interactions. # - # @param content [String, Array] Text, image, or audio input to the model, used to generate a response. + # @param content [String, Array] Text, image, or audio input to the model, used to generate a response. # - # @param role [Symbol, OpenAI::Responses::EasyInputMessage::Role] The role of the message input. One of `user`, `assistant`, `system`, or + # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] The role of the message input. One of `user`, `assistant`, `system`, or # - # @param type [Symbol, OpenAI::Responses::EasyInputMessage::Type] The type of the message input. Always `message`. + # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] The type of the message input. Always `message`. # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. 
# - # @see OpenAI::Responses::EasyInputMessage#content + # @see OpenAI::Models::Responses::EasyInputMessage#content module Content extend OpenAI::Internal::Type::Union @@ -55,28 +55,13 @@ module Content variant -> { OpenAI::Responses::ResponseInputMessageContentList } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ) - end - end + # @return [Array(String, Array)] end # The role of the message input. One of `user`, `assistant`, `system`, or # `developer`. # - # @see OpenAI::Responses::EasyInputMessage#role + # @see OpenAI::Models::Responses::EasyInputMessage#role module Role extend OpenAI::Internal::Type::Enum @@ -91,7 +76,7 @@ module Role # The type of the message input. Always `message`. # - # @see OpenAI::Responses::EasyInputMessage#type + # @see OpenAI::Models::Responses::EasyInputMessage#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 938f84c6..aead0521 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -19,7 +19,7 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute filters # A filter to apply. # - # @return [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] + # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::Responses::FileSearchTool::Filters }, nil?: true # @!attribute max_num_results @@ -32,12 +32,12 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # Ranking options for search. 
# - # @return [OpenAI::Responses::FileSearchTool::RankingOptions, nil] + # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Responses::FileSearchTool::RankingOptions } # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::FileSearchTool} for more details. + # {OpenAI::Models::Responses::FileSearchTool} for more details. # # A tool that searches for relevant content from uploaded files. Learn more about # the @@ -45,17 +45,17 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # # @param vector_store_ids [Array] The IDs of the vector stores to search. # - # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] A filter to apply. + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] A filter to apply. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 # - # @param ranking_options [OpenAI::Responses::FileSearchTool::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] Ranking options for search. # # @param type [Symbol, :file_search] The type of the file search tool. Always `file_search`. # A filter to apply. 
# - # @see OpenAI::Responses::FileSearchTool#filters + # @see OpenAI::Models::Responses::FileSearchTool#filters module Filters extend OpenAI::Internal::Type::Union @@ -66,19 +66,15 @@ module Filters variant -> { OpenAI::CompoundFilter } # @!method self.variants - # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) } - end + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end - # @see OpenAI::Responses::FileSearchTool#ranking_options + # @see OpenAI::Models::Responses::FileSearchTool#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # The ranker to use for the file search. # - # @return [Symbol, OpenAI::Responses::FileSearchTool::RankingOptions::Ranker, nil] + # @return [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Responses::FileSearchTool::RankingOptions::Ranker } # @!attribute score_threshold @@ -91,17 +87,17 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker: nil, score_threshold: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::FileSearchTool::RankingOptions} for more details. + # {OpenAI::Models::Responses::FileSearchTool::RankingOptions} for more details. # # Ranking options for search. # - # @param ranker [Symbol, OpenAI::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. + # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] The ranker to use for the file search. # # @param score_threshold [Float] The score threshold for the file search, a number between 0 and 1. Numbers close # The ranker to use for the file search. 
# - # @see OpenAI::Responses::FileSearchTool::RankingOptions#ranker + # @see OpenAI::Models::Responses::FileSearchTool::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index 246cf91a..f4db7602 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -37,7 +37,7 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, parameters:, strict:, description: nil, type: :function) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::FunctionTool} for more details. + # {OpenAI::Models::Responses::FunctionTool} for more details. # # Defines a function in your own code the model can choose to call. Learn more # about diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index fb2306d3..cb40574b 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -24,7 +24,7 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } # @!attribute limit @@ -35,12 +35,12 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel optional :limit, Integer # @!attribute order - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. 
# - # @return [Symbol, OpenAI::Responses::InputItemListParams::Order, nil] + # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Responses::InputItemListParams::Order } # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) @@ -51,15 +51,15 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # # @param before [String] An item ID to list items before, used in pagination. # - # @param include [Array] Additional fields to include in the response. See the `include` + # @param include [Array] Additional fields to include in the response. See the `include` # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `desc`. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index d8358f27..20471340 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -22,13 +22,13 @@ class Response < OpenAI::Internal::Type::BaseModel # @!attribute error # An error object returned when the model fails to generate a Response. 
# - # @return [OpenAI::Responses::ResponseError, nil] + # @return [OpenAI::Models::Responses::ResponseError, nil] required :error, -> { OpenAI::Responses::ResponseError }, nil?: true # @!attribute incomplete_details # Details about why the response is incomplete. # - # @return [OpenAI::Responses::Response::IncompleteDetails, nil] + # @return [OpenAI::Models::Responses::Response::IncompleteDetails, nil] required :incomplete_details, -> { OpenAI::Responses::Response::IncompleteDetails }, nil?: true # @!attribute instructions @@ -60,7 +60,7 @@ class Response < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] required :model, union: -> { OpenAI::ResponsesModel } # @!attribute object @@ -78,7 +78,7 @@ class Response < OpenAI::Internal::Type::BaseModel # an `assistant` message with the content generated by the model, you might # consider using the `output_text` property where supported in SDKs. # - # @return [Array] + # @return [Array] required :output, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputItem] } # @!attribute parallel_tool_calls @@ -101,7 +101,7 @@ class Response < OpenAI::Internal::Type::BaseModel # response. See the `tools` parameter to see how to specify which tools the model # can call. 
# - # @return [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] required :tool_choice, union: -> { OpenAI::Responses::Response::ToolChoice } # @!attribute tools @@ -120,7 +120,7 @@ class Response < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array] + # @return [Array] required :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -133,6 +133,13 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [Float, nil] required :top_p, Float, nil?: true + # @!attribute background + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + # + # @return [Boolean, nil] + optional :background, OpenAI::Internal::Type::Boolean, nil?: true + # @!attribute max_output_tokens # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and @@ -155,7 +162,7 @@ class Response < OpenAI::Internal::Type::BaseModel # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @return [OpenAI::Reasoning, nil] + # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Reasoning }, nil?: true # @!attribute service_tier @@ -177,14 +184,14 @@ class Response < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. 
# - # @return [Symbol, OpenAI::Responses::Response::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Responses::Response::ServiceTier }, nil?: true # @!attribute status # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. # - # @return [Symbol, OpenAI::Responses::ResponseStatus, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseStatus, nil] optional :status, enum: -> { OpenAI::Responses::ResponseStatus } # @!attribute text @@ -194,7 +201,7 @@ class Response < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @return [OpenAI::Responses::ResponseTextConfig, nil] + # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute truncation @@ -206,90 +213,92 @@ class Response < OpenAI::Internal::Type::BaseModel # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. # - # @return [Symbol, OpenAI::Responses::Response::Truncation, nil] + # @return [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] optional :truncation, enum: -> { OpenAI::Responses::Response::Truncation }, nil?: true # @!attribute usage # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. # - # @return [OpenAI::Responses::ResponseUsage, nil] + # @return [OpenAI::Models::Responses::ResponseUsage, nil] optional :usage, -> { OpenAI::Responses::ResponseUsage } # @!attribute user - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. 
+ # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). # # @return [String, nil] optional :user, String - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, background: nil, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::Response} for more details. + # {OpenAI::Models::Responses::Response} for more details. # # @param id [String] Unique identifier for this Response. # # @param created_at [Float] Unix timestamp (in seconds) of when this Response was created. # - # @param error [OpenAI::Responses::ResponseError, nil] An error object returned when the model fails to generate a Response. + # @param error [OpenAI::Models::Responses::ResponseError, nil] An error object returned when the model fails to generate a Response. # - # @param incomplete_details [OpenAI::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. + # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] Details about why the response is incomplete. 
# # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param output [Array] An array of content items generated by the model. + # @param output [Array] An array of content items generated by the model. # # @param parallel_tool_calls [Boolean] Whether to allow the model to run tool calls in parallel. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # + # @param background [Boolean, nil] Whether to run the model response in the background. 
+ # # @param max_output_tokens [Integer, nil] An upper bound for the number of tokens that can be generated for a response, in # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # - # @param status [Symbol, OpenAI::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, + # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] The status of the response generation. One of `completed`, `failed`, # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param truncation [Symbol, OpenAI::Responses::Response::Truncation, nil] The truncation strategy to use for the model response. + # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] The truncation strategy to use for the model response. # - # @param usage [OpenAI::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, + # @param usage [OpenAI::Models::Responses::ResponseUsage] Represents token usage details including input tokens, output tokens, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. 
# # @param object [Symbol, :response] The object type of this resource - always set to `response`. - # @see OpenAI::Responses::Response#incomplete_details + # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @!attribute reason # The reason why the response is incomplete. # - # @return [Symbol, OpenAI::Responses::Response::IncompleteDetails::Reason, nil] + # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Responses::Response::IncompleteDetails::Reason } # @!method initialize(reason: nil) # Details about why the response is incomplete. # - # @param reason [Symbol, OpenAI::Responses::Response::IncompleteDetails::Reason] The reason why the response is incomplete. + # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] The reason why the response is incomplete. # The reason why the response is incomplete. # - # @see OpenAI::Responses::Response::IncompleteDetails#reason + # @see OpenAI::Models::Responses::Response::IncompleteDetails#reason module Reason extend OpenAI::Internal::Type::Enum @@ -305,7 +314,7 @@ module Reason # response. See the `tools` parameter to see how to specify which tools the model # can call. 
# - # @see OpenAI::Responses::Response#tool_choice + # @see OpenAI::Models::Responses::Response#tool_choice module ToolChoice extend OpenAI::Internal::Type::Union @@ -327,17 +336,7 @@ module ToolChoice variant -> { OpenAI::Responses::ToolChoiceFunction } # @!method self.variants - # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ) - end - end + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -358,7 +357,7 @@ module ToolChoice # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @see OpenAI::Responses::Response#service_tier + # @see OpenAI::Models::Responses::Response#service_tier module ServiceTier extend OpenAI::Internal::Type::Enum @@ -378,7 +377,7 @@ module ServiceTier # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. 
# - # @see OpenAI::Responses::Response#truncation + # @see OpenAI::Models::Responses::Response#truncation module Truncation extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb index f630ddd2..07ecdb3d 100644 --- a/lib/openai/models/responses/response_audio_delta_event.rb +++ b/lib/openai/models/responses/response_audio_delta_event.rb @@ -10,20 +10,28 @@ class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [String] required :delta, String + # @!attribute sequence_number + # A sequence number for this chunk of the stream response. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.audio.delta`. # # @return [Symbol, :"response.audio.delta"] required :type, const: :"response.audio.delta" - # @!method initialize(delta:, type: :"response.audio.delta") + # @!method initialize(delta:, sequence_number:, type: :"response.audio.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseAudioDeltaEvent} for more details. # # Emitted when there is a partial audio response. # # @param delta [String] A chunk of Base64 encoded response audio bytes. # + # @param sequence_number [Integer] A sequence number for this chunk of the stream response. + # # @param type [Symbol, :"response.audio.delta"] The type of the event. Always `response.audio.delta`. 
end end diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb index d156cd64..c40cf77a 100644 --- a/lib/openai/models/responses/response_audio_done_event.rb +++ b/lib/openai/models/responses/response_audio_done_event.rb @@ -4,18 +4,26 @@ module OpenAI module Models module Responses class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of the delta. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.audio.done`. # # @return [Symbol, :"response.audio.done"] required :type, const: :"response.audio.done" - # @!method initialize(type: :"response.audio.done") + # @!method initialize(sequence_number:, type: :"response.audio.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseAudioDoneEvent} for more details. # # Emitted when the audio response is complete. # + # @param sequence_number [Integer] The sequence number of the delta. + # # @param type [Symbol, :"response.audio.done"] The type of the event. Always `response.audio.done`. end end diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 1361afe0..96372cc0 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -10,20 +10,28 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [String] required :delta, String + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.audio.transcript.delta`. 
# # @return [Symbol, :"response.audio.transcript.delta"] required :type, const: :"response.audio.transcript.delta" - # @!method initialize(delta:, type: :"response.audio.transcript.delta") + # @!method initialize(delta:, sequence_number:, type: :"response.audio.transcript.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioTranscriptDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent} for more details. # # Emitted when there is a partial transcript of audio. # # @param delta [String] The partial transcript of the audio response. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.audio.transcript.delta"] The type of the event. Always `response.audio.transcript.delta`. end end diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 13d588f5..9e0d38b9 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -4,18 +4,26 @@ module OpenAI module Models module Responses class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.audio.transcript.done`. # # @return [Symbol, :"response.audio.transcript.done"] required :type, const: :"response.audio.transcript.done" - # @!method initialize(type: :"response.audio.transcript.done") + # @!method initialize(sequence_number:, type: :"response.audio.transcript.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseAudioTranscriptDoneEvent} for more details. 
+ # {OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent} for more details. # # Emitted when the full audio transcript is completed. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.audio.transcript.done"] The type of the event. Always `response.audio.transcript.done`. end end diff --git a/lib/openai/models/responses/response_cancel_params.rb b/lib/openai/models/responses/response_cancel_params.rb new file mode 100644 index 00000000..a06d628c --- /dev/null +++ b/lib/openai/models/responses/response_cancel_params.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + # @see OpenAI::Resources::Responses#cancel + class ResponseCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + end + end + end +end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 07cebc16..e8413a69 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -16,15 +16,22 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.code.delta`. 
# # @return [Symbol, :"response.code_interpreter_call.code.delta"] required :type, const: :"response.code_interpreter_call.code.delta" - # @!method initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta") + # @!method initialize(delta:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent} for more + # details. # # Emitted when a partial code snippet is added by the code interpreter. # @@ -32,6 +39,8 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.code.delta"] The type of the event. Always `response.code_interpreter_call.code.delta`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index 40845a15..beaab321 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -16,15 +16,22 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.code.done`. 
# # @return [Symbol, :"response.code_interpreter_call.code.done"] required :type, const: :"response.code_interpreter_call.code.done" - # @!method initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done") + # @!method initialize(code:, output_index:, sequence_number:, type: :"response.code_interpreter_call.code.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent} for more + # details. # # Emitted when code snippet output is finalized by the code interpreter. # @@ -32,6 +39,8 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.code.done"] The type of the event. Always `response.code_interpreter_call.code.done`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index 37bf5f84..0e4b05f0 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -7,7 +7,7 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @!attribute code_interpreter_call # A tool call to run code. 
# - # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index @@ -16,22 +16,31 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.completed`. # # @return [Symbol, :"response.code_interpreter_call.completed"] required :type, const: :"response.code_interpreter_call.completed" - # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed") + # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent} for more + # details. # # Emitted when the code interpreter call is completed. # - # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.completed"] The type of the event. Always `response.code_interpreter_call.completed`. 
end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 96683a3d..26ec12b4 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -7,7 +7,7 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @!attribute code_interpreter_call # A tool call to run code. # - # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index @@ -16,23 +16,31 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.in_progress`. # # @return [Symbol, :"response.code_interpreter_call.in_progress"] required :type, const: :"response.code_interpreter_call.in_progress" - # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress") + # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent} for more + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent} for more # details. # # Emitted when a code interpreter call is in progress. # - # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. 
+ # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.in_progress"] The type of the event. Always `response.code_interpreter_call.in_progress`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index 1cf23747..f0cf91cc 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -7,7 +7,7 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @!attribute code_interpreter_call # A tool call to run code. # - # @return [OpenAI::Responses::ResponseCodeInterpreterToolCall] + # @return [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] required :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } # @!attribute output_index @@ -16,23 +16,31 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.code_interpreter_call.interpreting`. 
# # @return [Symbol, :"response.code_interpreter_call.interpreting"] required :type, const: :"response.code_interpreter_call.interpreting" - # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting") + # @!method initialize(code_interpreter_call:, output_index:, sequence_number:, type: :"response.code_interpreter_call.interpreting") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent} for more - # details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent} for + # more details. # # Emitted when the code interpreter is actively interpreting the code snippet. # - # @param code_interpreter_call [OpenAI::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] A tool call to run code. # # @param output_index [Integer] The index of the output item that the code interpreter call is in progress. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.code_interpreter_call.interpreting"] The type of the event. Always `response.code_interpreter_call.interpreting`. end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 81114445..f04eba44 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -19,7 +19,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute results # The results of the code interpreter tool call. 
# - # @return [Array] + # @return [Array] required :results, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseCodeInterpreterToolCall::Result] @@ -28,7 +28,7 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the code interpreter tool call. # - # @return [Symbol, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseCodeInterpreterToolCall::Status } # @!attribute type @@ -37,9 +37,15 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter_call] required :type, const: :code_interpreter_call - # @!method initialize(id:, code:, results:, status:, type: :code_interpreter_call) + # @!attribute container_id + # The ID of the container used to run the code. + # + # @return [String, nil] + optional :container_id, String + + # @!method initialize(id:, code:, results:, status:, container_id: nil, type: :code_interpreter_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall} for more details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall} for more details. # # A tool call to run code. # @@ -47,13 +53,15 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # # @param code [String] The code to run. # - # @param results [Array] The results of the code interpreter tool call. + # @param results [Array] The results of the code interpreter tool call. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseCodeInterpreterToolCall::Status] The status of the code interpreter tool call. 
+ # @param container_id [String] The ID of the container used to run the code. # # @param type [Symbol, :code_interpreter_call] The type of the code interpreter tool call. Always `code_interpreter_call`. - # The output of a code interpreter tool call that is text. + # The output of a code interpreter tool. module Result extend OpenAI::Internal::Type::Union @@ -80,8 +88,8 @@ class Logs < OpenAI::Internal::Type::BaseModel # @!method initialize(logs:, type: :logs) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for more - # details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs} for + # more details. # # The output of a code interpreter tool call that is text. # @@ -93,7 +101,7 @@ class Logs < OpenAI::Internal::Type::BaseModel class Files < OpenAI::Internal::Type::BaseModel # @!attribute files # - # @return [Array] + # @return [Array] required :files, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File] @@ -107,12 +115,12 @@ class Files < OpenAI::Internal::Type::BaseModel # @!method initialize(files:, type: :files) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files} for more - # details. + # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files} for + # more details. # # The output of a code interpreter tool call that is a file. # - # @param files [Array] + # @param files [Array] # # @param type [Symbol, :files] The type of the code interpreter file output. Always `files`. @@ -131,8 +139,8 @@ class File < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, mime_type:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} for - # more details. 
+ # {OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files::File} + # for more details. # # @param file_id [String] The ID of the file. # @@ -141,21 +149,12 @@ class File < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs, - OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files - ) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] end # The status of the code interpreter tool call. # - # @see OpenAI::Responses::ResponseCodeInterpreterToolCall#status + # @see OpenAI::Models::Responses::ResponseCodeInterpreterToolCall#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index 5bbb6426..37e03c75 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -7,22 +7,30 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # Properties of the completed response. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.completed`. 
# # @return [Symbol, :"response.completed"] required :type, const: :"response.completed" - # @!method initialize(response:, type: :"response.completed") + # @!method initialize(response:, sequence_number:, type: :"response.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCompletedEvent} for more details. + # {OpenAI::Models::Responses::ResponseCompletedEvent} for more details. # # Emitted when the model response is complete. # - # @param response [OpenAI::Responses::Response] Properties of the completed response. + # @param response [OpenAI::Models::Responses::Response] Properties of the completed response. + # + # @param sequence_number [Integer] The sequence number for this event. # # @param type [Symbol, :"response.completed"] The type of the event. Always `response.completed`. end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index 9a70b230..4bb9bf28 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -13,7 +13,7 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute action # A click action. 
# - # @return [OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait] + # @return [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] required :action, union: -> { OpenAI::Responses::ResponseComputerToolCall::Action } # @!attribute call_id @@ -25,7 +25,7 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @!attribute pending_safety_checks # The pending safety checks for the computer call. # - # @return [Array] + # @return [Array] required :pending_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] @@ -35,18 +35,18 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Status } # @!attribute type # The type of the computer call. Always `computer_call`. # - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Type] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] required :type, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Type } # @!method initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall} for more details. # # A tool call to a computer use tool. See the # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) @@ -54,19 +54,19 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the computer call. # - # @param action [OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait] A click action. 
+ # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] A click action. # # @param call_id [String] An identifier used when responding to the tool call with output. # - # @param pending_safety_checks [Array] The pending safety checks for the computer call. + # @param pending_safety_checks [Array] The pending safety checks for the computer call. # - # @param status [Symbol, OpenAI::Responses::ResponseComputerToolCall::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] The status of the item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Responses::ResponseComputerToolCall::Type] The type of the computer call. Always `computer_call`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] The type of the computer call. Always `computer_call`. # A click action. # - # @see OpenAI::Responses::ResponseComputerToolCall#action + # @see OpenAI::Models::Responses::ResponseComputerToolCall#action module Action extend OpenAI::Internal::Type::Union @@ -104,7 +104,7 @@ class Click < OpenAI::Internal::Type::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. 
# - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] required :button, enum: -> { OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button } # @!attribute type @@ -128,11 +128,12 @@ class Click < OpenAI::Internal::Type::BaseModel # @!method initialize(button:, x:, y_:, type: :click) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Click} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click} for more + # details. # # A click action. # - # @param button [Symbol, OpenAI::Responses::ResponseComputerToolCall::Action::Click::Button] Indicates which mouse button was pressed during the click. One of `left`, `right + # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] Indicates which mouse button was pressed during the click. One of `left`, `right # # @param x [Integer] The x-coordinate where the click occurred. # @@ -143,7 +144,7 @@ class Click < OpenAI::Internal::Type::BaseModel # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. # - # @see OpenAI::Responses::ResponseComputerToolCall::Action::Click#button + # @see OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click#button module Button extend OpenAI::Internal::Type::Enum @@ -180,8 +181,8 @@ class DoubleClick < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:, type: :double_click) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick} for more - # details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick} for + # more details. # # A double click action. 
# @@ -204,7 +205,7 @@ class Drag < OpenAI::Internal::Type::BaseModel # ] # ``` # - # @return [Array] + # @return [Array] required :path, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path] @@ -219,11 +220,12 @@ class Drag < OpenAI::Internal::Type::BaseModel # @!method initialize(path:, type: :drag) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Drag} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag} for more + # details. # # A drag action. # - # @param path [Array] An array of coordinates representing the path of the drag action. Coordinates wi + # @param path [Array] An array of coordinates representing the path of the drag action. Coordinates wi # # @param type [Symbol, :drag] Specifies the event type. For a drag action, this property is @@ -242,8 +244,8 @@ class Path < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path} for more - # details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag::Path} for + # more details. # # A series of x/y coordinate pairs in the drag path. # @@ -270,7 +272,7 @@ class Keypress < OpenAI::Internal::Type::BaseModel # @!method initialize(keys:, type: :keypress) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Keypress} for more + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress} for more # details. # # A collection of keypresses the model would like to perform. @@ -302,7 +304,8 @@ class Move < OpenAI::Internal::Type::BaseModel # @!method initialize(x:, y_:, type: :move) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Move} for more details. 
+ # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move} for more + # details. # # A mouse move action. # @@ -323,8 +326,8 @@ class Screenshot < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :screenshot) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot} for more - # details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot} for + # more details. # # A screenshot action. # @@ -365,7 +368,8 @@ class Scroll < OpenAI::Internal::Type::BaseModel # @!method initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Scroll} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll} for more + # details. # # A scroll action. # @@ -396,7 +400,8 @@ class Type < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :type) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Type} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type} for more + # details. # # An action to type in text. # @@ -415,7 +420,8 @@ class Wait < OpenAI::Internal::Type::BaseModel # @!method initialize(type: :wait) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCall::Action::Wait} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait} for more + # details. # # A wait action. 
# @@ -423,23 +429,7 @@ class Wait < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Responses::ResponseComputerToolCall::Action::Wait)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseComputerToolCall::Action::Click, - OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick, - OpenAI::Responses::ResponseComputerToolCall::Action::Drag, - OpenAI::Responses::ResponseComputerToolCall::Action::Keypress, - OpenAI::Responses::ResponseComputerToolCall::Action::Move, - OpenAI::Responses::ResponseComputerToolCall::Action::Screenshot, - OpenAI::Responses::ResponseComputerToolCall::Action::Scroll, - OpenAI::Responses::ResponseComputerToolCall::Action::Type, - OpenAI::Responses::ResponseComputerToolCall::Action::Wait - ) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] end class PendingSafetyCheck < 
OpenAI::Internal::Type::BaseModel @@ -474,7 +464,7 @@ class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseComputerToolCall#status + # @see OpenAI::Models::Responses::ResponseComputerToolCall#status module Status extend OpenAI::Internal::Type::Enum @@ -488,7 +478,7 @@ module Status # The type of the computer call. Always `computer_call`. # - # @see OpenAI::Responses::ResponseComputerToolCall#type + # @see OpenAI::Models::Responses::ResponseComputerToolCall#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index f18b9a16..71412a11 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -19,7 +19,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @!attribute output # A computer screenshot image used with the computer use tool. # - # @return [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] + # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] required :output, -> { OpenAI::Responses::ResponseComputerToolCallOutputScreenshot } # @!attribute type @@ -32,7 +32,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The safety checks reported by the API that have been acknowledged by the # developer. # - # @return [Array, nil] + # @return [Array, nil] optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] @@ -42,22 +42,23 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The status of the message input. 
One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseComputerToolCallOutputItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseComputerToolCallOutputItem::Status } # @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCallOutputItem} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCallOutputItem} for more + # details. # # @param id [String] The unique ID of the computer call tool output. # # @param call_id [String] The ID of the computer tool call that produced the output. # - # @param output [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. # - # @param acknowledged_safety_checks [Array] The safety checks reported by the API that have been acknowledged by the + # @param acknowledged_safety_checks [Array] The safety checks reported by the API that have been acknowledged by the # - # @param status [Symbol, OpenAI::Responses::ResponseComputerToolCallOutputItem::Status] The status of the message input. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] The status of the message input. One of `in_progress`, `completed`, or # # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. @@ -93,7 +94,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the message input. 
One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @see OpenAI::Responses::ResponseComputerToolCallOutputItem#status + # @see OpenAI::Models::Responses::ResponseComputerToolCallOutputItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index 962d8ed7..91dcc4a5 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -25,7 +25,8 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseComputerToolCallOutputScreenshot} for more details. + # {OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot} for more + # details. # # A computer screenshot image used with the computer use tool. 
# diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 7b4e0c77..6c8a047a 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -23,19 +23,7 @@ module ResponseContent variant -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile, OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile, - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 6b16ea84..9ef256b0 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -25,18 +25,24 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The content part that was added. # - # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] + # @return [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] required :part, union: -> { OpenAI::Responses::ResponseContentPartAddedEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. 
+ # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.content_part.added`. # # @return [Symbol, :"response.content_part.added"] required :type, const: :"response.content_part.added" - # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added") + # @!method initialize(content_index:, item_id:, output_index:, part:, sequence_number:, type: :"response.content_part.added") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseContentPartAddedEvent} for more details. + # {OpenAI::Models::Responses::ResponseContentPartAddedEvent} for more details. # # Emitted when a new content part is added. # @@ -46,13 +52,15 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the content part was added to. # - # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that was added. + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that was added. + # + # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.content_part.added"] The type of the event. Always `response.content_part.added`. # The content part that was added. 
# - # @see OpenAI::Responses::ResponseContentPartAddedEvent#part + # @see OpenAI::Models::Responses::ResponseContentPartAddedEvent#part module Part extend OpenAI::Internal::Type::Union @@ -65,13 +73,7 @@ module Part variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index e2af41dd..1b7603b6 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -25,18 +25,24 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The content part that is done. # - # @return [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] + # @return [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] required :part, union: -> { OpenAI::Responses::ResponseContentPartDoneEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.content_part.done`. 
# # @return [Symbol, :"response.content_part.done"] required :type, const: :"response.content_part.done" - # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done") + # @!method initialize(content_index:, item_id:, output_index:, part:, sequence_number:, type: :"response.content_part.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseContentPartDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseContentPartDoneEvent} for more details. # # Emitted when a content part is done. # @@ -46,13 +52,15 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the content part was added to. # - # @param part [OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal] The content part that is done. + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] The content part that is done. + # + # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.content_part.done"] The type of the event. Always `response.content_part.done`. # The content part that is done. 
# - # @see OpenAI::Responses::ResponseContentPartDoneEvent#part + # @see OpenAI::Models::Responses::ResponseContentPartDoneEvent#part module Part extend OpenAI::Internal::Type::Union @@ -65,13 +73,7 @@ module Part variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index affb7fa5..2058a351 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -21,7 +21,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) # - # @return [String, Array] + # @return [String, Array] required :input, union: -> { OpenAI::Responses::ResponseCreateParams::Input } # @!attribute model @@ -31,9 +31,16 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. # - # @return [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] + # @return [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] required :model, union: -> { OpenAI::ResponsesModel } + # @!attribute background + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). 
+ # + # @return [Boolean, nil] + optional :background, OpenAI::Internal::Type::Boolean, nil?: true + # @!attribute include # Specify additional output data to include in the model response. Currently # supported values are: @@ -49,7 +56,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # the `store` parameter is set to `false`, or when an organization is enrolled # in the zero data retention program). # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] }, nil?: true @@ -104,7 +111,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # Configuration options for # [reasoning models](https://platform.openai.com/docs/guides/reasoning). # - # @return [OpenAI::Reasoning, nil] + # @return [OpenAI::Models::Reasoning, nil] optional :reasoning, -> { OpenAI::Reasoning }, nil?: true # @!attribute service_tier @@ -126,7 +133,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # When this parameter is set, the response body will include the `service_tier` # utilized. # - # @return [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Responses::ResponseCreateParams::ServiceTier }, nil?: true # @!attribute store @@ -151,7 +158,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @return [OpenAI::Responses::ResponseTextConfig, nil] + # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Responses::ResponseTextConfig } # @!attribute tool_choice @@ -159,7 +166,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # response. 
See the `tools` parameter to see how to specify which tools the model # can call. # - # @return [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction, nil] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, nil] optional :tool_choice, union: -> { OpenAI::Responses::ResponseCreateParams::ToolChoice } # @!attribute tools @@ -178,7 +185,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). # - # @return [Array, nil] + # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool] } # @!attribute top_p @@ -200,26 +207,28 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. # - # @return [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] optional :truncation, enum: -> { OpenAI::Responses::ResponseCreateParams::Truncation }, nil?: true # @!attribute user - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
# # @return [String, nil] optional :user, String - # @!method initialize(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @!method initialize(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseCreateParams} for more details. # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param background [Boolean, nil] Whether to run the model response in the background. # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently + # @param include [Array, nil] Specify additional output data to include in the model response. 
Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # @@ -231,25 +240,25 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. 
You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -274,30 +283,7 @@ module Input variant -> { OpenAI::Responses::ResponseInput } # @!method self.variants - # @return [Array(String, Array)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] - ) - end - end + # @return [Array(String, Array)] end # Specifies the latency tier to use for processing the request. 
This parameter is @@ -352,17 +338,7 @@ module ToolChoice variant -> { OpenAI::Responses::ToolChoiceFunction } # @!method self.variants - # @return [Array(Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ) - end - end + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # The truncation strategy to use for the model response. diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index ec044aac..58d54869 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -7,22 +7,30 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was created. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.created`. # # @return [Symbol, :"response.created"] required :type, const: :"response.created" - # @!method initialize(response:, type: :"response.created") + # @!method initialize(response:, sequence_number:, type: :"response.created") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseCreatedEvent} for more details. + # {OpenAI::Models::Responses::ResponseCreatedEvent} for more details. # # An event that is emitted when a response is created. 
# - # @param response [OpenAI::Responses::Response] The response that was created. + # @param response [OpenAI::Models::Responses::Response] The response that was created. + # + # @param sequence_number [Integer] The sequence number for this event. # # @param type [Symbol, :"response.created"] The type of the event. Always `response.created`. end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index 45c2dda9..90c420f1 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -7,7 +7,7 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @!attribute code # The error code for the response. # - # @return [Symbol, OpenAI::Responses::ResponseError::Code] + # @return [Symbol, OpenAI::Models::Responses::ResponseError::Code] required :code, enum: -> { OpenAI::Responses::ResponseError::Code } # @!attribute message @@ -18,17 +18,17 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @!method initialize(code:, message:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseError} for more details. + # {OpenAI::Models::Responses::ResponseError} for more details. # # An error object returned when the model fails to generate a Response. # - # @param code [Symbol, OpenAI::Responses::ResponseError::Code] The error code for the response. + # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] The error code for the response. # # @param message [String] A human-readable description of the error. # The error code for the response. 
# - # @see OpenAI::Responses::ResponseError#code + # @see OpenAI::Models::Responses::ResponseError#code module Code extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index 651a400d..9dde9fb4 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -22,15 +22,21 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # @return [String, nil] required :param, String, nil?: true + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `error`. # # @return [Symbol, :error] required :type, const: :error - # @!method initialize(code:, message:, param:, type: :error) + # @!method initialize(code:, message:, param:, sequence_number:, type: :error) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseErrorEvent} for more details. + # {OpenAI::Models::Responses::ResponseErrorEvent} for more details. # # Emitted when an error occurs. # @@ -40,6 +46,8 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # # @param param [String, nil] The error parameter. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :error] The type of the event. Always `error`. end end diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index f3454dd5..064ba785 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -7,22 +7,30 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that failed. 
# - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.failed`. # # @return [Symbol, :"response.failed"] required :type, const: :"response.failed" - # @!method initialize(response:, type: :"response.failed") + # @!method initialize(response:, sequence_number:, type: :"response.failed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFailedEvent} for more details. + # {OpenAI::Models::Responses::ResponseFailedEvent} for more details. # # An event that is emitted when a response fails. # - # @param response [OpenAI::Responses::Response] The response that failed. + # @param response [OpenAI::Models::Responses::Response] The response that failed. + # + # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.failed"] The type of the event. Always `response.failed`. end diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index 5c616920..8ae559c0 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -16,15 +16,22 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.file_search_call.completed`. 
# # @return [Symbol, :"response.file_search_call.completed"] required :type, const: :"response.file_search_call.completed" - # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchCallCompletedEvent} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent} for more + # details. # # Emitted when a file search call is completed (results found). # @@ -32,6 +39,8 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the file search call is initiated. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.file_search_call.completed"] The type of the event. Always `response.file_search_call.completed`. end end diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index 15ff92de..2dc256ec 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -16,15 +16,22 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.file_search_call.in_progress`. 
# # @return [Symbol, :"response.file_search_call.in_progress"] required :type, const: :"response.file_search_call.in_progress" - # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchCallInProgressEvent} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent} for more + # details. # # Emitted when a file search call is initiated. # @@ -32,6 +39,8 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the file search call is initiated. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.file_search_call.in_progress"] The type of the event. Always `response.file_search_call.in_progress`. end end diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index 58540d2a..9cd93593 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -16,15 +16,22 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.file_search_call.searching`. 
# # @return [Symbol, :"response.file_search_call.searching"] required :type, const: :"response.file_search_call.searching" - # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.searching") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.file_search_call.searching") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchCallSearchingEvent} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent} for more + # details. # # Emitted when a file search is currently searching. # @@ -32,6 +39,8 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the file search call is searching. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.file_search_call.searching"] The type of the event. Always `response.file_search_call.searching`. end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index f8f7fc1b..034f63b7 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -20,7 +20,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, # - # @return [Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] required :status, enum: -> { OpenAI::Responses::ResponseFileSearchToolCall::Status } # @!attribute type @@ -32,7 +32,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!attribute results # The results of the file search tool call. 
# - # @return [Array, nil] + # @return [Array, nil] optional :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseFileSearchToolCall::Result] @@ -41,7 +41,7 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, queries:, status:, results: nil, type: :file_search_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchToolCall} for more details. + # {OpenAI::Models::Responses::ResponseFileSearchToolCall} for more details. # # The results of a file search tool call. See the # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) @@ -51,16 +51,16 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel # # @param queries [Array] The queries used to search for files. # - # @param status [Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Status] The status of the file search tool call. One of `in_progress`, + # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] The status of the file search tool call. One of `in_progress`, # - # @param results [Array, nil] The results of the file search tool call. + # @param results [Array, nil] The results of the file search tool call. # # @param type [Symbol, :file_search_call] The type of the file search tool call. Always `file_search_call`. # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, # - # @see OpenAI::Responses::ResponseFileSearchToolCall#status + # @see OpenAI::Models::Responses::ResponseFileSearchToolCall#status module Status extend OpenAI::Internal::Type::Enum @@ -115,7 +115,8 @@ class Result < OpenAI::Internal::Type::BaseModel # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFileSearchToolCall::Result} for more details. 
+ # {OpenAI::Models::Responses::ResponseFileSearchToolCall::Result} for more + # details. # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -138,10 +139,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index d81ab862..43f6ad0c 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -35,17 +35,7 @@ module ResponseFormatTextConfig variant :json_object, -> { OpenAI::ResponseFormatJSONObject } # @!method self.variants - # @return [Array(OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::ResponseFormatText, - OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, - OpenAI::ResponseFormatJSONObject - ) - end - end + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] end end end diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index c4e33d24..06e57803 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -43,7 +43,8 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @!method initialize(name:, schema:, description: nil, strict: nil, type: :json_schema) # Some parameter documentations has been truncated, see - # 
{OpenAI::Responses::ResponseFormatTextJSONSchemaConfig} for more details. + # {OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig} for more + # details. # # JSON Schema response format. Used to generate structured JSON responses. Learn # more about diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index 05a59f54..70a57547 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -22,15 +22,22 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.function_call_arguments.delta`. # # @return [Symbol, :"response.function_call_arguments.delta"] required :type, const: :"response.function_call_arguments.delta" - # @!method initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.function_call_arguments.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent} for more + # details. # # Emitted when there is a partial function-call arguments delta. # @@ -40,6 +47,8 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # # @param output_index [Integer] The index of the output item that the function-call arguments delta is added to. # + # @param sequence_number [Integer] The sequence number of this event. 
+ # # @param type [Symbol, :"response.function_call_arguments.delta"] The type of the event. Always `response.function_call_arguments.delta`. end end diff --git a/lib/openai/models/responses/response_function_call_arguments_done_event.rb b/lib/openai/models/responses/response_function_call_arguments_done_event.rb index ded7ece3..a5b29f4b 100644 --- a/lib/openai/models/responses/response_function_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_done_event.rb @@ -22,12 +22,18 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # # @return [Symbol, :"response.function_call_arguments.done"] required :type, const: :"response.function_call_arguments.done" - # @!method initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done") + # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.function_call_arguments.done") # Emitted when function-call arguments are finalized. # # @param arguments [String] The function-call arguments. @@ -36,6 +42,8 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item. # + # @param sequence_number [Integer] The sequence number of this event. 
+ # # @param type [Symbol, :"response.function_call_arguments.done"] end end diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index e972d9e8..55602423 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -38,12 +38,12 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseFunctionToolCall::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseFunctionToolCall::Status } # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionToolCall} for more details. + # {OpenAI::Models::Responses::ResponseFunctionToolCall} for more details. # # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) @@ -57,14 +57,14 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the function tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseFunctionToolCall::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] The status of the item. One of `in_progress`, `completed`, or # # @param type [Symbol, :function_call] The type of the function tool call. Always `function_call`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @see OpenAI::Responses::ResponseFunctionToolCall#status + # @see OpenAI::Models::Responses::ResponseFunctionToolCall#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index e113338f..b0cbb3be 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -12,7 +12,7 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction # @!method initialize(id:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionToolCallItem} for more details. + # {OpenAI::Models::Responses::ResponseFunctionToolCallItem} for more details. # # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 53e8afd3..bae34c36 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -32,12 +32,13 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @return [Symbol, OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status } # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionToolCallOutputItem} for more details. + # {OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem} for more + # details. # # @param id [String] The unique ID of the function call tool output. # @@ -45,14 +46,14 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # # @param output [String] A JSON string of the output of the function tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] The status of the item. One of `in_progress`, `completed`, or # # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @see OpenAI::Responses::ResponseFunctionToolCallOutputItem#status + # @see OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index f69ad27a..a95b9030 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -13,7 +13,7 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the web search tool call. # - # @return [Symbol, OpenAI::Responses::ResponseFunctionWebSearch::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] required :status, enum: -> { OpenAI::Responses::ResponseFunctionWebSearch::Status } # @!attribute type @@ -24,7 +24,7 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, status:, type: :web_search_call) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseFunctionWebSearch} for more details. + # {OpenAI::Models::Responses::ResponseFunctionWebSearch} for more details. # # The results of a web search tool call. See the # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for @@ -32,13 +32,13 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique ID of the web search tool call. # - # @param status [Symbol, OpenAI::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] The status of the web search tool call. # # @param type [Symbol, :web_search_call] The type of the web search tool call. Always `web_search_call`. # The status of the web search tool call. 
# - # @see OpenAI::Responses::ResponseFunctionWebSearch#status + # @see OpenAI::Models::Responses::ResponseFunctionWebSearch#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_image_gen_call_completed_event.rb b/lib/openai/models/responses/response_image_gen_call_completed_event.rb new file mode 100644 index 00000000..ceccdcf6 --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_completed_event.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.completed'. + # + # @return [Symbol, :"response.image_generation_call.completed"] + required :type, const: :"response.image_generation_call.completed" + + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.image_generation_call.completed") + # Emitted when an image generation tool call has completed and the final image is + # available. + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.image_generation_call.completed"] The type of the event. Always 'response.image_generation_call.completed'. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_image_gen_call_generating_event.rb b/lib/openai/models/responses/response_image_gen_call_generating_event.rb new file mode 100644 index 00000000..d8f9683d --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_generating_event.rb @@ -0,0 +1,45 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of the image generation item being processed. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.generating'. + # + # @return [Symbol, :"response.image_generation_call.generating"] + required :type, const: :"response.image_generation_call.generating" + + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.image_generation_call.generating") + # Emitted when an image generation tool call is actively generating an image + # (intermediate state). + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of the image generation item being processed. + # + # @param type [Symbol, :"response.image_generation_call.generating"] The type of the event. Always 'response.image_generation_call.generating'. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_image_gen_call_in_progress_event.rb b/lib/openai/models/responses/response_image_gen_call_in_progress_event.rb new file mode 100644 index 00000000..83d9ac0a --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_in_progress_event.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of the image generation item being processed. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.in_progress'. + # + # @return [Symbol, :"response.image_generation_call.in_progress"] + required :type, const: :"response.image_generation_call.in_progress" + + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.image_generation_call.in_progress") + # Emitted when an image generation tool call is in progress. + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of the image generation item being processed. + # + # @param type [Symbol, :"response.image_generation_call.in_progress"] The type of the event. Always 'response.image_generation_call.in_progress'. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb b/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb new file mode 100644 index 00000000..36a2c557 --- /dev/null +++ b/lib/openai/models/responses/response_image_gen_call_partial_image_event.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseImageGenCallPartialImageEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the image generation item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute partial_image_b64 + # Base64-encoded partial image data, suitable for rendering as an image. + # + # @return [String] + required :partial_image_b64, String + + # @!attribute partial_image_index + # 0-based index for the partial image (backend is 1-based, but this is 0-based for + # the user). + # + # @return [Integer] + required :partial_image_index, Integer + + # @!attribute sequence_number + # The sequence number of the image generation item being processed. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.image_generation_call.partial_image'. + # + # @return [Symbol, :"response.image_generation_call.partial_image"] + required :type, const: :"response.image_generation_call.partial_image" + + # @!method initialize(item_id:, output_index:, partial_image_b64:, partial_image_index:, sequence_number:, type: :"response.image_generation_call.partial_image") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent} for more + # details. 
+ # + # Emitted when a partial image is available during image generation streaming. + # + # @param item_id [String] The unique identifier of the image generation item being processed. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param partial_image_b64 [String] Base64-encoded partial image data, suitable for rendering as an image. + # + # @param partial_image_index [Integer] 0-based index for the partial image (backend is 1-based, but this is 0-based for + # + # @param sequence_number [Integer] The sequence number of the image generation item being processed. + # + # @param type [Symbol, :"response.image_generation_call.partial_image"] The type of the event. Always 'response.image_generation_call.partial_image'. + end + end + end +end diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index 788e9ea6..775c9b0c 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -7,22 +7,30 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that is in progress. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.in_progress`. # # @return [Symbol, :"response.in_progress"] required :type, const: :"response.in_progress" - # @!method initialize(response:, type: :"response.in_progress") + # @!method initialize(response:, sequence_number:, type: :"response.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInProgressEvent} for more details. 
+ # {OpenAI::Models::Responses::ResponseInProgressEvent} for more details. # # Emitted when the response is in progress. # - # @param response [OpenAI::Responses::Response] The response that is in progress. + # @param response [OpenAI::Models::Responses::Response] The response that is in progress. + # + # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.in_progress"] The type of the event. Always `response.in_progress`. end diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index d33b32fc..bd1402da 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -7,22 +7,30 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @!attribute response # The response that was incomplete. # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] required :response, -> { OpenAI::Responses::Response } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.incomplete`. # # @return [Symbol, :"response.incomplete"] required :type, const: :"response.incomplete" - # @!method initialize(response:, type: :"response.incomplete") + # @!method initialize(response:, sequence_number:, type: :"response.incomplete") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseIncompleteEvent} for more details. + # {OpenAI::Models::Responses::ResponseIncompleteEvent} for more details. # # An event that is emitted when a response finishes as incomplete. # - # @param response [OpenAI::Responses::Response] The response that was incomplete. + # @param response [OpenAI::Models::Responses::Response] The response that was incomplete. 
+ # + # @param sequence_number [Integer] The sequence number of this event. # # @param type [Symbol, :"response.incomplete"] The type of the event. Always `response.incomplete`. end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index d403a4ca..ae4a6908 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -13,7 +13,7 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @!attribute format_ # The format of the audio data. Currently supported formats are `mp3` and `wav`. # - # @return [Symbol, OpenAI::Responses::ResponseInputAudio::Format] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] required :format_, enum: -> { OpenAI::Responses::ResponseInputAudio::Format }, api_name: :format # @!attribute type @@ -24,19 +24,19 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, format_:, type: :input_audio) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputAudio} for more details. + # {OpenAI::Models::Responses::ResponseInputAudio} for more details. # # An audio input to the model. # # @param data [String] Base64-encoded audio data. # - # @param format_ [Symbol, OpenAI::Responses::ResponseInputAudio::Format] The format of the audio data. Currently supported formats are `mp3` and + # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] The format of the audio data. Currently supported formats are `mp3` and # # @param type [Symbol, :input_audio] The type of the input item. Always `input_audio`. # The format of the audio data. Currently supported formats are `mp3` and `wav`. 
# - # @see OpenAI::Responses::ResponseInputAudio#format_ + # @see OpenAI::Models::Responses::ResponseInputAudio#format_ module Format extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index d518271c..59c6970e 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -19,17 +19,7 @@ module ResponseInputContent variant :input_file, -> { OpenAI::Responses::ResponseInputFile } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseInputText, OpenAI::Responses::ResponseInputImage, OpenAI::Responses::ResponseInputFile)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] end end end diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 3c992986..229e942a 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -30,7 +30,7 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputFile} for more details. + # {OpenAI::Models::Responses::ResponseInputFile} for more details. # # A file input to the model. 
# diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index 242f378e..261bd7d6 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -8,7 +8,7 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. # - # @return [Symbol, OpenAI::Responses::ResponseInputImage::Detail] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] required :detail, enum: -> { OpenAI::Responses::ResponseInputImage::Detail } # @!attribute type @@ -32,12 +32,12 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @!method initialize(detail:, file_id: nil, image_url: nil, type: :input_image) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputImage} for more details. + # {OpenAI::Models::Responses::ResponseInputImage} for more details. # # An image input to the model. Learn about # [image inputs](https://platform.openai.com/docs/guides/vision). # - # @param detail [Symbol, OpenAI::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, `low`, or + # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] The detail level of the image to be sent to the model. One of `high`, `low`, or # # @param file_id [String, nil] The ID of the file to be sent to the model. # @@ -48,7 +48,7 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. 
# - # @see OpenAI::Responses::ResponseInputImage#detail + # @see OpenAI::Models::Responses::ResponseInputImage#detail module Detail extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index 62cf20ee..a8f469fa 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -56,6 +56,30 @@ module ResponseInputItem # [managing context](https://platform.openai.com/docs/guides/conversation-state). variant :reasoning, -> { OpenAI::Responses::ResponseReasoningItem } + # An image generation request made by the model. + variant :image_generation_call, -> { OpenAI::Responses::ResponseInputItem::ImageGenerationCall } + + # A tool call to run code. + variant :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + + # A tool call to run a command on the local shell. + variant :local_shell_call, -> { OpenAI::Responses::ResponseInputItem::LocalShellCall } + + # The output of a local shell tool call. + variant :local_shell_call_output, -> { OpenAI::Responses::ResponseInputItem::LocalShellCallOutput } + + # A list of tools available on an MCP server. + variant :mcp_list_tools, -> { OpenAI::Responses::ResponseInputItem::McpListTools } + + # A request for human approval of a tool invocation. + variant :mcp_approval_request, -> { OpenAI::Responses::ResponseInputItem::McpApprovalRequest } + + # A response to an MCP approval request. + variant :mcp_approval_response, -> { OpenAI::Responses::ResponseInputItem::McpApprovalResponse } + + # An invocation of a tool on an MCP server. + variant :mcp_call, -> { OpenAI::Responses::ResponseInputItem::McpCall } + # An internal identifier for an item to reference. 
variant :item_reference, -> { OpenAI::Responses::ResponseInputItem::ItemReference } @@ -64,7 +88,7 @@ class Message < OpenAI::Internal::Type::BaseModel # A list of one or many input items to the model, containing different content # types. # - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] @@ -73,41 +97,41 @@ class Message < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Role] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] required :role, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Role } # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Status } # @!attribute type # The type of the message input. Always set to `message`. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::Message::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type, nil] optional :type, enum: -> { OpenAI::Responses::ResponseInputItem::Message::Type } # @!method initialize(content:, role:, status: nil, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::Message} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::Message} for more details. # # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take # precedence over instructions given with the `user` role. 
# - # @param content [Array] A list of one or many input items to the model, containing different content + # @param content [Array] A list of one or many input items to the model, containing different content # - # @param role [Symbol, OpenAI::Responses::ResponseInputItem::Message::Role] The role of the message input. One of `user`, `system`, or `developer`. + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] The role of the message input. One of `user`, `system`, or `developer`. # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::Message::Status] The status of item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] The status of item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Responses::ResponseInputItem::Message::Type] The type of the message input. Always set to `message`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] The type of the message input. Always set to `message`. # The role of the message input. One of `user`, `system`, or `developer`. # - # @see OpenAI::Responses::ResponseInputItem::Message#role + # @see OpenAI::Models::Responses::ResponseInputItem::Message#role module Role extend OpenAI::Internal::Type::Enum @@ -122,7 +146,7 @@ module Role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseInputItem::Message#status + # @see OpenAI::Models::Responses::ResponseInputItem::Message#status module Status extend OpenAI::Internal::Type::Enum @@ -136,7 +160,7 @@ module Status # The type of the message input. Always set to `message`. 
# - # @see OpenAI::Responses::ResponseInputItem::Message#type + # @see OpenAI::Models::Responses::ResponseInputItem::Message#type module Type extend OpenAI::Internal::Type::Enum @@ -157,7 +181,7 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @!attribute output # A computer screenshot image used with the computer use tool. # - # @return [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] + # @return [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] required :output, -> { OpenAI::Responses::ResponseComputerToolCallOutputScreenshot } # @!attribute type @@ -176,7 +200,7 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # The safety checks reported by the API that have been acknowledged by the # developer. # - # @return [Array, nil] + # @return [Array, nil] optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] @@ -187,26 +211,27 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status }, nil?: true # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::ComputerCallOutput} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput} for more + # details. # # The output of a computer tool call. # # @param call_id [String] The ID of the computer tool call that produced the output. 
# - # @param output [OpenAI::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] A computer screenshot image used with the computer use tool. # # @param id [String, nil] The ID of the computer tool call output. # - # @param acknowledged_safety_checks [Array, nil] The safety checks reported by the API that have been acknowledged by the develop + # @param acknowledged_safety_checks [Array, nil] The safety checks reported by the API that have been acknowledged by the develop # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] The status of the message input. One of `in_progress`, `completed`, or `incomple + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] The status of the message input. One of `in_progress`, `completed`, or `incomple # # @param type [Symbol, :computer_call_output] The type of the computer tool call output. Always `computer_call_output`. @@ -242,7 +267,7 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @see OpenAI::Responses::ResponseInputItem::ComputerCallOutput#status + # @see OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput#status module Status extend OpenAI::Internal::Type::Enum @@ -285,14 +310,15 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. 
# - # @return [Symbol, OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status }, nil?: true # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputItem::FunctionCallOutput} for more details. + # {OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput} for more + # details. # # The output of a function tool call. # @@ -302,14 +328,190 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # # @param id [String, nil] The unique ID of the function tool call output. Populated when this item is retu # - # @param status [Symbol, OpenAI::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Popu + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. Popu # # @param type [Symbol, :function_call_output] The type of the function tool call output. Always `function_call_output`. # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseInputItem::FunctionCallOutput#status + # @see OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the image generation call. 
+ # + # @return [String] + required :id, String + + # @!attribute result + # The generated image encoded in base64. + # + # @return [String, nil] + required :result, String, nil?: true + + # @!attribute status + # The status of the image generation call. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status } + + # @!attribute type + # The type of the image generation call. Always `image_generation_call`. + # + # @return [Symbol, :image_generation_call] + required :type, const: :image_generation_call + + # @!method initialize(id:, result:, status:, type: :image_generation_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall} for more + # details. + # + # An image generation request made by the model. + # + # @param id [String] The unique ID of the image generation call. + # + # @param result [String, nil] The generated image encoded in base64. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::Status] The status of the image generation call. + # + # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. + + # The status of the image generation call. + # + # @see OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + GENERATING = :generating + FAILED = :failed + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell call. + # + # @return [String] + required :id, String + + # @!attribute action + # Execute a shell command on the server. 
+ # + # @return [OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Action] + required :action, -> { OpenAI::Responses::ResponseInputItem::LocalShellCall::Action } + + # @!attribute call_id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :call_id, String + + # @!attribute status + # The status of the local shell call. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseInputItem::LocalShellCall::Status } + + # @!attribute type + # The type of the local shell call. Always `local_shell_call`. + # + # @return [Symbol, :local_shell_call] + required :type, const: :local_shell_call + + # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::LocalShellCall} for more details. + # + # A tool call to run a command on the local shell. + # + # @param id [String] The unique ID of the local shell call. + # + # @param action [OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Action] Execute a shell command on the server. + # + # @param call_id [String] The unique ID of the local shell tool call generated by the model. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Status] The status of the local shell call. + # + # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. + + # @see OpenAI::Models::Responses::ResponseInputItem::LocalShellCall#action + class Action < OpenAI::Internal::Type::BaseModel + # @!attribute command + # The command to run. + # + # @return [Array] + required :command, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute env + # Environment variables to set for the command. 
+ # + # @return [Hash{Symbol=>String}] + required :env, OpenAI::Internal::Type::HashOf[String] + + # @!attribute type + # The type of the local shell action. Always `exec`. + # + # @return [Symbol, :exec] + required :type, const: :exec + + # @!attribute timeout_ms + # Optional timeout in milliseconds for the command. + # + # @return [Integer, nil] + optional :timeout_ms, Integer, nil?: true + + # @!attribute user + # Optional user to run the command as. + # + # @return [String, nil] + optional :user, String, nil?: true + + # @!attribute working_directory + # Optional working directory to run the command in. + # + # @return [String, nil] + optional :working_directory, String, nil?: true + + # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::Action} for more + # details. + # + # Execute a shell command on the server. + # + # @param command [Array] The command to run. + # + # @param env [Hash{Symbol=>String}] Environment variables to set for the command. + # + # @param timeout_ms [Integer, nil] Optional timeout in milliseconds for the command. + # + # @param user [String, nil] Optional user to run the command as. + # + # @param working_directory [String, nil] Optional working directory to run the command in. + # + # @param type [Symbol, :exec] The type of the local shell action. Always `exec`. + end + + # The status of the local shell call. + # + # @see OpenAI::Models::Responses::ResponseInputItem::LocalShellCall#status module Status extend OpenAI::Internal::Type::Enum @@ -322,6 +524,317 @@ module Status end end + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell tool call generated by the model. 
+ # + # @return [String] + required :id, String + + # @!attribute output + # A JSON string of the output of the local shell tool call. + # + # @return [String] + required :output, String + + # @!attribute type + # The type of the local shell tool call output. Always `local_shell_call_output`. + # + # @return [Symbol, :local_shell_call_output] + required :type, const: :local_shell_call_output + + # @!attribute status + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] + optional :status, + enum: -> { OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status }, + nil?: true + + # @!method initialize(id:, output:, status: nil, type: :local_shell_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput} for more + # details. + # + # The output of a local shell tool call. + # + # @param id [String] The unique ID of the local shell tool call generated by the model. + # + # @param output [String] A JSON string of the output of the local shell tool call. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::Status, nil] The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @param type [Symbol, :local_shell_call_output] The type of the local shell tool call output. Always `local_shell_call_output`. + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @see OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the list. 
+ # + # @return [String] + required :id, String + + # @!attribute server_label + # The label of the MCP server. + # + # @return [String] + required :server_label, String + + # @!attribute tools + # The tools available on the server. + # + # @return [Array] + required :tools, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] + } + + # @!attribute type + # The type of the item. Always `mcp_list_tools`. + # + # @return [Symbol, :mcp_list_tools] + required :type, const: :mcp_list_tools + + # @!attribute error + # Error message if the server could not list tools. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::McpListTools} for more details. + # + # A list of tools available on an MCP server. + # + # @param id [String] The unique ID of the list. + # + # @param server_label [String] The label of the MCP server. + # + # @param tools [Array] The tools available on the server. + # + # @param error [String, nil] Error message if the server could not list tools. + # + # @param type [Symbol, :mcp_list_tools] The type of the item. Always `mcp_list_tools`. + + class Tool < OpenAI::Internal::Type::BaseModel + # @!attribute input_schema + # The JSON schema describing the tool's input. + # + # @return [Object] + required :input_schema, OpenAI::Internal::Type::Unknown + + # @!attribute name + # The name of the tool. + # + # @return [String] + required :name, String + + # @!attribute annotations + # Additional annotations about the tool. + # + # @return [Object, nil] + optional :annotations, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute description + # The description of the tool. 
+ # + # @return [String, nil] + optional :description, String, nil?: true + + # @!method initialize(input_schema:, name:, annotations: nil, description: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::McpListTools::Tool} for more + # details. + # + # A tool available on an MCP server. + # + # @param input_schema [Object] The JSON schema describing the tool's input. + # + # @param name [String] The name of the tool. + # + # @param annotations [Object, nil] Additional annotations about the tool. + # + # @param description [String, nil] The description of the tool. + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval request. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of arguments for the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool to run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server making the request. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_approval_request`. + # + # @return [Symbol, :mcp_approval_request] + required :type, const: :mcp_approval_request + + # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest} for more + # details. + # + # A request for human approval of a tool invocation. + # + # @param id [String] The unique ID of the approval request. + # + # @param arguments [String] A JSON string of arguments for the tool. + # + # @param name [String] The name of the tool to run. + # + # @param server_label [String] The label of the MCP server making the request. 
+ # + # @param type [Symbol, :mcp_approval_request] The type of the item. Always `mcp_approval_request`. + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + # @!attribute approval_request_id + # The ID of the approval request being answered. + # + # @return [String] + required :approval_request_id, String + + # @!attribute approve + # Whether the request was approved. + # + # @return [Boolean] + required :approve, OpenAI::Internal::Type::Boolean + + # @!attribute type + # The type of the item. Always `mcp_approval_response`. + # + # @return [Symbol, :mcp_approval_response] + required :type, const: :mcp_approval_response + + # @!attribute id + # The unique ID of the approval response + # + # @return [String, nil] + optional :id, String, nil?: true + + # @!attribute reason + # Optional reason for the decision. + # + # @return [String, nil] + optional :reason, String, nil?: true + + # @!method initialize(approval_request_id:, approve:, id: nil, reason: nil, type: :mcp_approval_response) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse} for more + # details. + # + # A response to an MCP approval request. + # + # @param approval_request_id [String] The ID of the approval request being answered. + # + # @param approve [Boolean] Whether the request was approved. + # + # @param id [String, nil] The unique ID of the approval response + # + # @param reason [String, nil] Optional reason for the decision. + # + # @param type [Symbol, :mcp_approval_response] The type of the item. Always `mcp_approval_response`. + end + + class McpCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the tool call. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of the arguments passed to the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool that was run. 
+ # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server running the tool. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_call`. + # + # @return [Symbol, :mcp_call] + required :type, const: :mcp_call + + # @!attribute error + # The error from the tool call, if any. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!attribute output + # The output from the tool call. + # + # @return [String, nil] + optional :output, String, nil?: true + + # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseInputItem::McpCall} for more details. + # + # An invocation of a tool on an MCP server. + # + # @param id [String] The unique ID of the tool call. + # + # @param arguments [String] A JSON string of the arguments passed to the tool. + # + # @param name [String] The name of the tool that was run. + # + # @param server_label [String] The label of the MCP server running the tool. + # + # @param error [String, nil] The error from the tool call, if any. + # + # @param output [String, nil] The output from the tool call. + # + # @param type [Symbol, :mcp_call] The type of the item. Always `mcp_call`. + end + class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute id # The ID of the item to reference. @@ -332,7 +845,7 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of item to reference. Always `item_reference`. 
# - # @return [Symbol, OpenAI::Responses::ResponseInputItem::ItemReference::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] optional :type, enum: -> { OpenAI::Responses::ResponseInputItem::ItemReference::Type }, nil?: true # @!method initialize(id:, type: nil) @@ -340,11 +853,11 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # # @param id [String] The ID of the item to reference. # - # @param type [Symbol, OpenAI::Responses::ResponseInputItem::ItemReference::Type, nil] The type of item to reference. Always `item_reference`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::ItemReference::Type, nil] The type of item to reference. Always `item_reference`. # The type of item to reference. Always `item_reference`. # - # @see OpenAI::Responses::ResponseInputItem::ItemReference#type + # @see OpenAI::Models::Responses::ResponseInputItem::ItemReference#type module Type extend OpenAI::Internal::Type::Enum @@ -356,25 +869,7 @@ module Type end # @!method self.variants - # @return [Array(OpenAI::Responses::EasyInputMessage, OpenAI::Responses::ResponseInputItem::Message, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, OpenAI::Responses::ResponseInputItem::ItemReference)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - 
OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - end - end + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCall, OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseInputItem::McpListTools, OpenAI::Models::Responses::ResponseInputItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseInputItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseInputItem::McpCall, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 6bad6d0f..90fade2d 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -14,7 +14,7 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # A list of one or many input items to the model, containing different content # types. 
# - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseInputContent] @@ -23,39 +23,39 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # @!attribute role # The role of the message input. One of `user`, `system`, or `developer`. # - # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Role] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] required :role, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Role } # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Status } # @!attribute type # The type of the message input. Always set to `message`. # - # @return [Symbol, OpenAI::Responses::ResponseInputMessageItem::Type, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type, nil] optional :type, enum: -> { OpenAI::Responses::ResponseInputMessageItem::Type } # @!method initialize(id:, content:, role:, status: nil, type: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseInputMessageItem} for more details. + # {OpenAI::Models::Responses::ResponseInputMessageItem} for more details. # # @param id [String] The unique ID of the message input. # - # @param content [Array] A list of one or many input items to the model, containing different content + # @param content [Array] A list of one or many input items to the model, containing different content # - # @param role [Symbol, OpenAI::Responses::ResponseInputMessageItem::Role] The role of the message input. One of `user`, `system`, or `developer`. 
+ # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] The role of the message input. One of `user`, `system`, or `developer`. # - # @param status [Symbol, OpenAI::Responses::ResponseInputMessageItem::Status] The status of item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] The status of item. One of `in_progress`, `completed`, or # - # @param type [Symbol, OpenAI::Responses::ResponseInputMessageItem::Type] The type of the message input. Always set to `message`. + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] The type of the message input. Always set to `message`. # The role of the message input. One of `user`, `system`, or `developer`. # - # @see OpenAI::Responses::ResponseInputMessageItem#role + # @see OpenAI::Models::Responses::ResponseInputMessageItem#role module Role extend OpenAI::Internal::Type::Enum @@ -70,7 +70,7 @@ module Role # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseInputMessageItem#status + # @see OpenAI::Models::Responses::ResponseInputMessageItem#status module Status extend OpenAI::Internal::Type::Enum @@ -84,7 +84,7 @@ module Status # The type of the message input. Always set to `message`. 
# - # @see OpenAI::Responses::ResponseInputMessageItem#type + # @see OpenAI::Models::Responses::ResponseInputMessageItem#type module Type extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index fe6001ab..4b501f36 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -34,23 +34,515 @@ module ResponseItem variant :function_call_output, -> { OpenAI::Responses::ResponseFunctionToolCallOutputItem } - # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseInputMessageItem, OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, OpenAI::Responses::ResponseFunctionToolCallOutputItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) + # An image generation request made by the model. + variant :image_generation_call, -> { OpenAI::Responses::ResponseItem::ImageGenerationCall } + + # A tool call to run code. + variant :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + + # A tool call to run a command on the local shell. + variant :local_shell_call, -> { OpenAI::Responses::ResponseItem::LocalShellCall } + + # The output of a local shell tool call. 
+ variant :local_shell_call_output, -> { OpenAI::Responses::ResponseItem::LocalShellCallOutput } + + # A list of tools available on an MCP server. + variant :mcp_list_tools, -> { OpenAI::Responses::ResponseItem::McpListTools } + + # A request for human approval of a tool invocation. + variant :mcp_approval_request, -> { OpenAI::Responses::ResponseItem::McpApprovalRequest } + + # A response to an MCP approval request. + variant :mcp_approval_response, -> { OpenAI::Responses::ResponseItem::McpApprovalResponse } + + # An invocation of a tool on an MCP server. + variant :mcp_call, -> { OpenAI::Responses::ResponseItem::McpCall } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the image generation call. + # + # @return [String] + required :id, String + + # @!attribute result + # The generated image encoded in base64. + # + # @return [String, nil] + required :result, String, nil?: true + + # @!attribute status + # The status of the image generation call. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseItem::ImageGenerationCall::Status } + + # @!attribute type + # The type of the image generation call. Always `image_generation_call`. + # + # @return [Symbol, :image_generation_call] + required :type, const: :image_generation_call + + # @!method initialize(id:, result:, status:, type: :image_generation_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::ImageGenerationCall} for more details. + # + # An image generation request made by the model. + # + # @param id [String] The unique ID of the image generation call. + # + # @param result [String, nil] The generated image encoded in base64. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::Status] The status of the image generation call. 
+ # + # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. + + # The status of the image generation call. + # + # @see OpenAI::Models::Responses::ResponseItem::ImageGenerationCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + GENERATING = :generating + FAILED = :failed + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell call. + # + # @return [String] + required :id, String + + # @!attribute action + # Execute a shell command on the server. + # + # @return [OpenAI::Models::Responses::ResponseItem::LocalShellCall::Action] + required :action, -> { OpenAI::Responses::ResponseItem::LocalShellCall::Action } + + # @!attribute call_id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :call_id, String + + # @!attribute status + # The status of the local shell call. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseItem::LocalShellCall::Status } + + # @!attribute type + # The type of the local shell call. Always `local_shell_call`. + # + # @return [Symbol, :local_shell_call] + required :type, const: :local_shell_call + + # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::LocalShellCall} for more details. + # + # A tool call to run a command on the local shell. + # + # @param id [String] The unique ID of the local shell call. + # + # @param action [OpenAI::Models::Responses::ResponseItem::LocalShellCall::Action] Execute a shell command on the server. 
+ # + # @param call_id [String] The unique ID of the local shell tool call generated by the model. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCall::Status] The status of the local shell call. + # + # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. + + # @see OpenAI::Models::Responses::ResponseItem::LocalShellCall#action + class Action < OpenAI::Internal::Type::BaseModel + # @!attribute command + # The command to run. + # + # @return [Array] + required :command, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute env + # Environment variables to set for the command. + # + # @return [Hash{Symbol=>String}] + required :env, OpenAI::Internal::Type::HashOf[String] + + # @!attribute type + # The type of the local shell action. Always `exec`. + # + # @return [Symbol, :exec] + required :type, const: :exec + + # @!attribute timeout_ms + # Optional timeout in milliseconds for the command. + # + # @return [Integer, nil] + optional :timeout_ms, Integer, nil?: true + + # @!attribute user + # Optional user to run the command as. + # + # @return [String, nil] + optional :user, String, nil?: true + + # @!attribute working_directory + # Optional working directory to run the command in. + # + # @return [String, nil] + optional :working_directory, String, nil?: true + + # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::LocalShellCall::Action} for more + # details. + # + # Execute a shell command on the server. + # + # @param command [Array] The command to run. + # + # @param env [Hash{Symbol=>String}] Environment variables to set for the command. + # + # @param timeout_ms [Integer, nil] Optional timeout in milliseconds for the command. + # + # @param user [String, nil] Optional user to run the command as. 
+ # + # @param working_directory [String, nil] Optional working directory to run the command in. + # + # @param type [Symbol, :exec] The type of the local shell action. Always `exec`. + end + + # The status of the local shell call. + # + # @see OpenAI::Models::Responses::ResponseItem::LocalShellCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] end end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell tool call generated by the model. + # + # @return [String] + required :id, String + + # @!attribute output + # A JSON string of the output of the local shell tool call. + # + # @return [String] + required :output, String + + # @!attribute type + # The type of the local shell tool call output. Always `local_shell_call_output`. + # + # @return [Symbol, :local_shell_call_output] + required :type, const: :local_shell_call_output + + # @!attribute status + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::Status, nil] + optional :status, + enum: -> { + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status + }, + nil?: true + + # @!method initialize(id:, output:, status: nil, type: :local_shell_call_output) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput} for more + # details. + # + # The output of a local shell tool call. + # + # @param id [String] The unique ID of the local shell tool call generated by the model. + # + # @param output [String] A JSON string of the output of the local shell tool call. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::Status, nil] The status of the item. 
One of `in_progress`, `completed`, or `incomplete`. + # + # @param type [Symbol, :local_shell_call_output] The type of the local shell tool call output. Always `local_shell_call_output`. + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + # + # @see OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the list. + # + # @return [String] + required :id, String + + # @!attribute server_label + # The label of the MCP server. + # + # @return [String] + required :server_label, String + + # @!attribute tools + # The tools available on the server. + # + # @return [Array] + required :tools, + -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseItem::McpListTools::Tool] } + + # @!attribute type + # The type of the item. Always `mcp_list_tools`. + # + # @return [Symbol, :mcp_list_tools] + required :type, const: :mcp_list_tools + + # @!attribute error + # Error message if the server could not list tools. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::McpListTools} for more details. + # + # A list of tools available on an MCP server. + # + # @param id [String] The unique ID of the list. + # + # @param server_label [String] The label of the MCP server. + # + # @param tools [Array] The tools available on the server. + # + # @param error [String, nil] Error message if the server could not list tools. + # + # @param type [Symbol, :mcp_list_tools] The type of the item. Always `mcp_list_tools`. 
+ + class Tool < OpenAI::Internal::Type::BaseModel + # @!attribute input_schema + # The JSON schema describing the tool's input. + # + # @return [Object] + required :input_schema, OpenAI::Internal::Type::Unknown + + # @!attribute name + # The name of the tool. + # + # @return [String] + required :name, String + + # @!attribute annotations + # Additional annotations about the tool. + # + # @return [Object, nil] + optional :annotations, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute description + # The description of the tool. + # + # @return [String, nil] + optional :description, String, nil?: true + + # @!method initialize(input_schema:, name:, annotations: nil, description: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::McpListTools::Tool} for more details. + # + # A tool available on an MCP server. + # + # @param input_schema [Object] The JSON schema describing the tool's input. + # + # @param name [String] The name of the tool. + # + # @param annotations [Object, nil] Additional annotations about the tool. + # + # @param description [String, nil] The description of the tool. + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval request. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of arguments for the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool to run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server making the request. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_approval_request`. 
+ # + # @return [Symbol, :mcp_approval_request] + required :type, const: :mcp_approval_request + + # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::McpApprovalRequest} for more details. + # + # A request for human approval of a tool invocation. + # + # @param id [String] The unique ID of the approval request. + # + # @param arguments [String] A JSON string of arguments for the tool. + # + # @param name [String] The name of the tool to run. + # + # @param server_label [String] The label of the MCP server making the request. + # + # @param type [Symbol, :mcp_approval_request] The type of the item. Always `mcp_approval_request`. + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval response + # + # @return [String] + required :id, String + + # @!attribute approval_request_id + # The ID of the approval request being answered. + # + # @return [String] + required :approval_request_id, String + + # @!attribute approve + # Whether the request was approved. + # + # @return [Boolean] + required :approve, OpenAI::Internal::Type::Boolean + + # @!attribute type + # The type of the item. Always `mcp_approval_response`. + # + # @return [Symbol, :mcp_approval_response] + required :type, const: :mcp_approval_response + + # @!attribute reason + # Optional reason for the decision. + # + # @return [String, nil] + optional :reason, String, nil?: true + + # @!method initialize(id:, approval_request_id:, approve:, reason: nil, type: :mcp_approval_response) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::McpApprovalResponse} for more details. + # + # A response to an MCP approval request. 
+ # + # @param id [String] The unique ID of the approval response + # + # @param approval_request_id [String] The ID of the approval request being answered. + # + # @param approve [Boolean] Whether the request was approved. + # + # @param reason [String, nil] Optional reason for the decision. + # + # @param type [Symbol, :mcp_approval_response] The type of the item. Always `mcp_approval_response`. + end + + class McpCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the tool call. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of the arguments passed to the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool that was run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server running the tool. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_call`. + # + # @return [Symbol, :mcp_call] + required :type, const: :mcp_call + + # @!attribute error + # The error from the tool call, if any. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!attribute output + # The output from the tool call. + # + # @return [String, nil] + optional :output, String, nil?: true + + # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseItem::McpCall} for more details. + # + # An invocation of a tool on an MCP server. + # + # @param id [String] The unique ID of the tool call. + # + # @param arguments [String] A JSON string of the arguments passed to the tool. + # + # @param name [String] The name of the tool that was run. + # + # @param server_label [String] The label of the MCP server running the tool. 
+ # + # @param error [String, nil] The error from the tool call, if any. + # + # @param output [String, nil] The output from the tool call. + # + # @param type [Symbol, :mcp_call] The type of the item. Always `mcp_call`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem, OpenAI::Models::Responses::ResponseItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseItem::LocalShellCall, OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput, OpenAI::Models::Responses::ResponseItem::McpListTools, OpenAI::Models::Responses::ResponseItem::McpApprovalRequest, OpenAI::Models::Responses::ResponseItem::McpApprovalResponse, OpenAI::Models::Responses::ResponseItem::McpCall)] end end end diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index 6d7d8e30..ea2b6f1b 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -7,7 +7,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!attribute data # A list of items used to generate this response. # - # @return [Array] + # @return [Array] required :data, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseItem] } # @!attribute first_id @@ -37,7 +37,7 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) # A list of Response items. 
# - # @param data [Array] A list of items used to generate this response. + # @param data [Array] A list of items used to generate this response. # # @param first_id [String] The ID of the first item in the list. # diff --git a/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb new file mode 100644 index 00000000..0c734e96 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_arguments_delta_event.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute delta + # The partial update to the arguments for the MCP tool call. + # + # @return [Object] + required :delta, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the MCP tool call item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.arguments_delta'. + # + # @return [Symbol, :"response.mcp_call.arguments_delta"] + required :type, const: :"response.mcp_call.arguments_delta" + + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_delta") + # Emitted when there is a delta (partial update) to the arguments of an MCP tool + # call. + # + # @param delta [Object] The partial update to the arguments for the MCP tool call. + # + # @param item_id [String] The unique identifier of the MCP tool call item being processed. 
+ # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_call.arguments_delta"] The type of the event. Always 'response.mcp_call.arguments_delta'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb new file mode 100644 index 00000000..f8f70329 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_arguments_done_event.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute arguments + # The finalized arguments for the MCP tool call. + # + # @return [Object] + required :arguments, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the MCP tool call item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.arguments_done'. + # + # @return [Symbol, :"response.mcp_call.arguments_done"] + required :type, const: :"response.mcp_call.arguments_done" + + # @!method initialize(arguments:, item_id:, output_index:, sequence_number:, type: :"response.mcp_call.arguments_done") + # Emitted when the arguments for an MCP tool call are finalized. + # + # @param arguments [Object] The finalized arguments for the MCP tool call. + # + # @param item_id [String] The unique identifier of the MCP tool call item being processed. 
+ # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_call.arguments_done"] The type of the event. Always 'response.mcp_call.arguments_done'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_completed_event.rb b/lib/openai/models/responses/response_mcp_call_completed_event.rb new file mode 100644 index 00000000..082474fd --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_completed_event.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.completed'. + # + # @return [Symbol, :"response.mcp_call.completed"] + required :type, const: :"response.mcp_call.completed" + + # @!method initialize(sequence_number:, type: :"response.mcp_call.completed") + # Emitted when an MCP tool call has completed successfully. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_call.completed"] The type of the event. Always 'response.mcp_call.completed'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_failed_event.rb b/lib/openai/models/responses/response_mcp_call_failed_event.rb new file mode 100644 index 00000000..cca4a22d --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_failed_event.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. 
+ # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.failed'. + # + # @return [Symbol, :"response.mcp_call.failed"] + required :type, const: :"response.mcp_call.failed" + + # @!method initialize(sequence_number:, type: :"response.mcp_call.failed") + # Emitted when an MCP tool call has failed. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_call.failed"] The type of the event. Always 'response.mcp_call.failed'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_call_in_progress_event.rb b/lib/openai/models/responses/response_mcp_call_in_progress_event.rb new file mode 100644 index 00000000..4d02f14f --- /dev/null +++ b/lib/openai/models/responses/response_mcp_call_in_progress_event.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpCallInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the MCP tool call item being processed. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_call.in_progress'. + # + # @return [Symbol, :"response.mcp_call.in_progress"] + required :type, const: :"response.mcp_call.in_progress" + + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.mcp_call.in_progress") + # Emitted when an MCP tool call is in progress. + # + # @param item_id [String] The unique identifier of the MCP tool call item being processed. 
+ # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_call.in_progress"] The type of the event. Always 'response.mcp_call.in_progress'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb new file mode 100644 index 00000000..dd6bfdcb --- /dev/null +++ b/lib/openai/models/responses/response_mcp_list_tools_completed_event.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_list_tools.completed'. + # + # @return [Symbol, :"response.mcp_list_tools.completed"] + required :type, const: :"response.mcp_list_tools.completed" + + # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.completed") + # Emitted when the list of available MCP tools has been successfully retrieved. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_list_tools.completed"] The type of the event. Always 'response.mcp_list_tools.completed'. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb new file mode 100644 index 00000000..a4c2fc10 --- /dev/null +++ b/lib/openai/models/responses/response_mcp_list_tools_failed_event.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_list_tools.failed'. + # + # @return [Symbol, :"response.mcp_list_tools.failed"] + required :type, const: :"response.mcp_list_tools.failed" + + # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.failed") + # Emitted when the attempt to list available MCP tools has failed. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_list_tools.failed"] The type of the event. Always 'response.mcp_list_tools.failed'. + end + end + end +end diff --git a/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb new file mode 100644 index 00000000..e931fb3b --- /dev/null +++ b/lib/openai/models/responses/response_mcp_list_tools_in_progress_event.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.mcp_list_tools.in_progress'. 
+ # + # @return [Symbol, :"response.mcp_list_tools.in_progress"] + required :type, const: :"response.mcp_list_tools.in_progress" + + # @!method initialize(sequence_number:, type: :"response.mcp_list_tools.in_progress") + # Emitted when the system is in the process of retrieving the list of available + # MCP tools. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.mcp_list_tools.in_progress"] The type of the event. Always 'response.mcp_list_tools.in_progress'. + end + end + end +end diff --git a/lib/openai/models/responses/response_output_audio.rb b/lib/openai/models/responses/response_output_audio.rb index 8183cdda..811fed00 100644 --- a/lib/openai/models/responses/response_output_audio.rb +++ b/lib/openai/models/responses/response_output_audio.rb @@ -24,7 +24,7 @@ class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel # @!method initialize(data:, transcript:, type: :output_audio) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputAudio} for more details. + # {OpenAI::Models::Responses::ResponseOutputAudio} for more details. # # An audio output from the model. # diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 502acd49..b6fb663b 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -34,21 +34,408 @@ module ResponseOutputItem # [managing context](https://platform.openai.com/docs/guides/conversation-state). 
variant :reasoning, -> { OpenAI::Responses::ResponseReasoningItem } - # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) + # An image generation request made by the model. + variant :image_generation_call, -> { OpenAI::Responses::ResponseOutputItem::ImageGenerationCall } + + # A tool call to run code. + variant :code_interpreter_call, -> { OpenAI::Responses::ResponseCodeInterpreterToolCall } + + # A tool call to run a command on the local shell. + variant :local_shell_call, -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall } + + # An invocation of a tool on an MCP server. + variant :mcp_call, -> { OpenAI::Responses::ResponseOutputItem::McpCall } + + # A list of tools available on an MCP server. + variant :mcp_list_tools, -> { OpenAI::Responses::ResponseOutputItem::McpListTools } + + # A request for human approval of a tool invocation. + variant :mcp_approval_request, -> { OpenAI::Responses::ResponseOutputItem::McpApprovalRequest } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the image generation call. + # + # @return [String] + required :id, String + + # @!attribute result + # The generated image encoded in base64. + # + # @return [String, nil] + required :result, String, nil?: true + + # @!attribute status + # The status of the image generation call. 
+ # + # @return [Symbol, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status } + + # @!attribute type + # The type of the image generation call. Always `image_generation_call`. + # + # @return [Symbol, :image_generation_call] + required :type, const: :image_generation_call + + # @!method initialize(id:, result:, status:, type: :image_generation_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall} for more + # details. + # + # An image generation request made by the model. + # + # @param id [String] The unique ID of the image generation call. + # + # @param result [String, nil] The generated image encoded in base64. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::Status] The status of the image generation call. + # + # @param type [Symbol, :image_generation_call] The type of the image generation call. Always `image_generation_call`. + + # The status of the image generation call. + # + # @see OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + GENERATING = :generating + FAILED = :failed + + # @!method self.values + # @return [Array] + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the local shell call. + # + # @return [String] + required :id, String + + # @!attribute action + # Execute a shell command on the server. + # + # @return [OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Action] + required :action, -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action } + + # @!attribute call_id + # The unique ID of the local shell tool call generated by the model. 
+ # + # @return [String] + required :call_id, String + + # @!attribute status + # The status of the local shell call. + # + # @return [Symbol, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Status] + required :status, enum: -> { OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status } + + # @!attribute type + # The type of the local shell call. Always `local_shell_call`. + # + # @return [Symbol, :local_shell_call] + required :type, const: :local_shell_call + + # @!method initialize(id:, action:, call_id:, status:, type: :local_shell_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall} for more + # details. + # + # A tool call to run a command on the local shell. + # + # @param id [String] The unique ID of the local shell call. + # + # @param action [OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Action] Execute a shell command on the server. + # + # @param call_id [String] The unique ID of the local shell tool call generated by the model. + # + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Status] The status of the local shell call. + # + # @param type [Symbol, :local_shell_call] The type of the local shell call. Always `local_shell_call`. + + # @see OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall#action + class Action < OpenAI::Internal::Type::BaseModel + # @!attribute command + # The command to run. + # + # @return [Array] + required :command, OpenAI::Internal::Type::ArrayOf[String] + + # @!attribute env + # Environment variables to set for the command. + # + # @return [Hash{Symbol=>String}] + required :env, OpenAI::Internal::Type::HashOf[String] + + # @!attribute type + # The type of the local shell action. Always `exec`. + # + # @return [Symbol, :exec] + required :type, const: :exec + + # @!attribute timeout_ms + # Optional timeout in milliseconds for the command. 
+ # + # @return [Integer, nil] + optional :timeout_ms, Integer, nil?: true + + # @!attribute user + # Optional user to run the command as. + # + # @return [String, nil] + optional :user, String, nil?: true + + # @!attribute working_directory + # Optional working directory to run the command in. + # + # @return [String, nil] + optional :working_directory, String, nil?: true + + # @!method initialize(command:, env:, timeout_ms: nil, user: nil, working_directory: nil, type: :exec) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::Action} for more + # details. + # + # Execute a shell command on the server. + # + # @param command [Array] The command to run. + # + # @param env [Hash{Symbol=>String}] Environment variables to set for the command. + # + # @param timeout_ms [Integer, nil] Optional timeout in milliseconds for the command. + # + # @param user [String, nil] Optional user to run the command as. + # + # @param working_directory [String, nil] Optional working directory to run the command in. + # + # @param type [Symbol, :exec] The type of the local shell action. Always `exec`. + end + + # The status of the local shell call. + # + # @see OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall#status + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS = :in_progress + COMPLETED = :completed + INCOMPLETE = :incomplete + + # @!method self.values + # @return [Array] + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the tool call. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of the arguments passed to the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool that was run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server running the tool. 
+ # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_call`. + # + # @return [Symbol, :mcp_call] + required :type, const: :mcp_call + + # @!attribute error + # The error from the tool call, if any. + # + # @return [String, nil] + optional :error, String, nil?: true + + # @!attribute output + # The output from the tool call. + # + # @return [String, nil] + optional :output, String, nil?: true + + # @!method initialize(id:, arguments:, name:, server_label:, error: nil, output: nil, type: :mcp_call) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItem::McpCall} for more details. + # + # An invocation of a tool on an MCP server. + # + # @param id [String] The unique ID of the tool call. + # + # @param arguments [String] A JSON string of the arguments passed to the tool. + # + # @param name [String] The name of the tool that was run. + # + # @param server_label [String] The label of the MCP server running the tool. + # + # @param error [String, nil] The error from the tool call, if any. + # + # @param output [String, nil] The output from the tool call. + # + # @param type [Symbol, :mcp_call] The type of the item. Always `mcp_call`. + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the list. + # + # @return [String] + required :id, String + + # @!attribute server_label + # The label of the MCP server. + # + # @return [String] + required :server_label, String + + # @!attribute tools + # The tools available on the server. + # + # @return [Array] + required :tools, + -> { + OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool] + } + + # @!attribute type + # The type of the item. Always `mcp_list_tools`. + # + # @return [Symbol, :mcp_list_tools] + required :type, const: :mcp_list_tools + + # @!attribute error + # Error message if the server could not list tools. 
+ # + # @return [String, nil] + optional :error, String, nil?: true + + # @!method initialize(id:, server_label:, tools:, error: nil, type: :mcp_list_tools) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItem::McpListTools} for more details. + # + # A list of tools available on an MCP server. + # + # @param id [String] The unique ID of the list. + # + # @param server_label [String] The label of the MCP server. + # + # @param tools [Array] The tools available on the server. + # + # @param error [String, nil] Error message if the server could not list tools. + # + # @param type [Symbol, :mcp_list_tools] The type of the item. Always `mcp_list_tools`. + + class Tool < OpenAI::Internal::Type::BaseModel + # @!attribute input_schema + # The JSON schema describing the tool's input. + # + # @return [Object] + required :input_schema, OpenAI::Internal::Type::Unknown + + # @!attribute name + # The name of the tool. + # + # @return [String] + required :name, String + + # @!attribute annotations + # Additional annotations about the tool. + # + # @return [Object, nil] + optional :annotations, OpenAI::Internal::Type::Unknown, nil?: true + + # @!attribute description + # The description of the tool. + # + # @return [String, nil] + optional :description, String, nil?: true + + # @!method initialize(input_schema:, name:, annotations: nil, description: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItem::McpListTools::Tool} for more + # details. + # + # A tool available on an MCP server. + # + # @param input_schema [Object] The JSON schema describing the tool's input. + # + # @param name [String] The name of the tool. + # + # @param annotations [Object, nil] Additional annotations about the tool. + # + # @param description [String, nil] The description of the tool. 
end end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + # @!attribute id + # The unique ID of the approval request. + # + # @return [String] + required :id, String + + # @!attribute arguments + # A JSON string of arguments for the tool. + # + # @return [String] + required :arguments, String + + # @!attribute name + # The name of the tool to run. + # + # @return [String] + required :name, String + + # @!attribute server_label + # The label of the MCP server making the request. + # + # @return [String] + required :server_label, String + + # @!attribute type + # The type of the item. Always `mcp_approval_request`. + # + # @return [Symbol, :mcp_approval_request] + required :type, const: :mcp_approval_request + + # @!method initialize(id:, arguments:, name:, server_label:, type: :mcp_approval_request) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest} for more + # details. + # + # A request for human approval of a tool invocation. + # + # @param id [String] The unique ID of the approval request. + # + # @param arguments [String] A JSON string of arguments for the tool. + # + # @param name [String] The name of the tool to run. + # + # @param server_label [String] The label of the MCP server making the request. + # + # @param type [Symbol, :mcp_approval_request] The type of the item. Always `mcp_approval_request`. 
+ end + + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest)] end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 94383e4f..119bea71 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was added. 
# - # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] + # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -16,22 +16,30 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.output_item.added`. # # @return [Symbol, :"response.output_item.added"] required :type, const: :"response.output_item.added" - # @!method initialize(item:, output_index:, type: :"response.output_item.added") + # @!method initialize(item:, output_index:, sequence_number:, type: :"response.output_item.added") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItemAddedEvent} for more details. + # {OpenAI::Models::Responses::ResponseOutputItemAddedEvent} for more details. # # Emitted when a new output item is added. 
# - # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] The output item that was added. + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was added. # # @param output_index [Integer] The index of the output item that was added. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.output_item.added"] The type of the event. Always `response.output_item.added`. end end diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 54d941ad..45b061b7 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -7,7 +7,7 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute item # The output item that was marked done. 
# - # @return [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] + # @return [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] required :item, union: -> { OpenAI::Responses::ResponseOutputItem } # @!attribute output_index @@ -16,22 +16,30 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.output_item.done`. # # @return [Symbol, :"response.output_item.done"] required :type, const: :"response.output_item.done" - # @!method initialize(item:, output_index:, type: :"response.output_item.done") + # @!method initialize(item:, output_index:, sequence_number:, type: :"response.output_item.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputItemDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseOutputItemDoneEvent} for more details. # # Emitted when an output item is marked done. 
# - # @param item [OpenAI::Responses::ResponseOutputMessage, OpenAI::Responses::ResponseFileSearchToolCall, OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, OpenAI::Responses::ResponseReasoningItem] The output item that was marked done. + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall, OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall, OpenAI::Models::Responses::ResponseOutputItem::McpCall, OpenAI::Models::Responses::ResponseOutputItem::McpListTools, OpenAI::Models::Responses::ResponseOutputItem::McpApprovalRequest] The output item that was marked done. # # @param output_index [Integer] The index of the output item that was marked done. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.output_item.done"] The type of the event. Always `response.output_item.done`. end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index d0d7f291..72e65a6d 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -13,7 +13,7 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @!attribute content # The content of the output message. 
# - # @return [Array] + # @return [Array] required :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputMessage::Content] @@ -29,7 +29,7 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseOutputMessage::Status] + # @return [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] required :status, enum: -> { OpenAI::Responses::ResponseOutputMessage::Status } # @!attribute type @@ -40,15 +40,15 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, content:, status:, role: :assistant, type: :message) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputMessage} for more details. + # {OpenAI::Models::Responses::ResponseOutputMessage} for more details. # # An output message from the model. # # @param id [String] The unique ID of the output message. # - # @param content [Array] The content of the output message. + # @param content [Array] The content of the output message. # - # @param status [Symbol, OpenAI::Responses::ResponseOutputMessage::Status] The status of the message input. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] The status of the message input. One of `in_progress`, `completed`, or # # @param role [Symbol, :assistant] The role of the output message. Always `assistant`. 
# @@ -67,19 +67,13 @@ module Content variant :refusal, -> { OpenAI::Responses::ResponseOutputRefusal } # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any(OpenAI::Responses::ResponseOutputText, OpenAI::Responses::ResponseOutputRefusal) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # - # @see OpenAI::Responses::ResponseOutputMessage#status + # @see OpenAI::Models::Responses::ResponseOutputMessage#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index fc08cb56..a9646f34 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -7,7 +7,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!attribute annotations # The annotations of the text output. # - # @return [Array] + # @return [Array] required :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::ResponseOutputText::Annotation] @@ -28,7 +28,7 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @!method initialize(annotations:, text:, type: :output_text) # A text output from the model. # - # @param annotations [Array] The annotations of the text output. + # @param annotations [Array] The annotations of the text output. # # @param text [String] The text output from the model. 
# @@ -144,7 +144,8 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @!method initialize(file_id:, index:, type: :file_path) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseOutputText::Annotation::FilePath} for more details. + # {OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath} for more + # details. # # A path to a file. # @@ -156,17 +157,7 @@ class FilePath < OpenAI::Internal::Type::BaseModel end # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Responses::ResponseOutputText::Annotation::FilePath)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseOutputText::Annotation::FileCitation, - OpenAI::Responses::ResponseOutputText::Annotation::URLCitation, - OpenAI::Responses::ResponseOutputText::Annotation::FilePath - ) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_output_text_annotation_added_event.rb b/lib/openai/models/responses/response_output_text_annotation_added_event.rb new file mode 100644 index 00000000..eba4d8a7 --- /dev/null +++ b/lib/openai/models/responses/response_output_text_annotation_added_event.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute annotation + # The annotation object being added. (See annotation schema for details.) + # + # @return [Object] + required :annotation, OpenAI::Internal::Type::Unknown + + # @!attribute annotation_index + # The index of the annotation within the content part. 
+ # + # @return [Integer] + required :annotation_index, Integer + + # @!attribute content_index + # The index of the content part within the output item. + # + # @return [Integer] + required :content_index, Integer + + # @!attribute item_id + # The unique identifier of the item to which the annotation is being added. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.output_text_annotation.added'. + # + # @return [Symbol, :"response.output_text_annotation.added"] + required :type, const: :"response.output_text_annotation.added" + + # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, sequence_number:, type: :"response.output_text_annotation.added") + # Emitted when an annotation is added to output text content. + # + # @param annotation [Object] The annotation object being added. (See annotation schema for details.) + # + # @param annotation_index [Integer] The index of the annotation within the content part. + # + # @param content_index [Integer] The index of the content part within the output item. + # + # @param item_id [String] The unique identifier of the item to which the annotation is being added. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.output_text_annotation.added"] The type of the event. Always 'response.output_text_annotation.added'. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_queued_event.rb b/lib/openai/models/responses/response_queued_event.rb new file mode 100644 index 00000000..0625aa8b --- /dev/null +++ b/lib/openai/models/responses/response_queued_event.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel + # @!attribute response + # The full response object that is queued. + # + # @return [OpenAI::Models::Responses::Response] + required :response, -> { OpenAI::Responses::Response } + + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.queued'. + # + # @return [Symbol, :"response.queued"] + required :type, const: :"response.queued" + + # @!method initialize(response:, sequence_number:, type: :"response.queued") + # Emitted when a response is queued and waiting to be processed. + # + # @param response [OpenAI::Models::Responses::Response] The full response object that is queued. + # + # @param sequence_number [Integer] The sequence number for this event. + # + # @param type [Symbol, :"response.queued"] The type of the event. Always 'response.queued'. + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_delta_event.rb b/lib/openai/models/responses/response_reasoning_delta_event.rb new file mode 100644 index 00000000..a8b51c21 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_delta_event.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute content_index + # The index of the reasoning content part within the output item. 
+ # + # @return [Integer] + required :content_index, Integer + + # @!attribute delta + # The partial update to the reasoning content. + # + # @return [Object] + required :delta, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the item for which reasoning is being updated. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute type + # The type of the event. Always 'response.reasoning.delta'. + # + # @return [Symbol, :"response.reasoning.delta"] + required :type, const: :"response.reasoning.delta" + + # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.reasoning.delta") + # Emitted when there is a delta (partial update) to the reasoning content. + # + # @param content_index [Integer] The index of the reasoning content part within the output item. + # + # @param delta [Object] The partial update to the reasoning content. + # + # @param item_id [String] The unique identifier of the item for which reasoning is being updated. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param type [Symbol, :"response.reasoning.delta"] The type of the event. Always 'response.reasoning.delta'. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_done_event.rb b/lib/openai/models/responses/response_reasoning_done_event.rb new file mode 100644 index 00000000..0c5e1861 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_done_event.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute content_index + # The index of the reasoning content part within the output item. + # + # @return [Integer] + required :content_index, Integer + + # @!attribute item_id + # The unique identifier of the item for which reasoning is finalized. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute text + # The finalized reasoning text. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the event. Always 'response.reasoning.done'. + # + # @return [Symbol, :"response.reasoning.done"] + required :type, const: :"response.reasoning.done" + + # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.reasoning.done") + # Emitted when the reasoning content is finalized for an item. + # + # @param content_index [Integer] The index of the reasoning content part within the output item. + # + # @param item_id [String] The unique identifier of the item for which reasoning is finalized. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param text [String] The finalized reasoning text. 
+ # + # @param type [Symbol, :"response.reasoning.done"] The type of the event. Always 'response.reasoning.done'. + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 47d20960..fa878448 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -13,7 +13,7 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @!attribute summary # Reasoning text contents. # - # @return [Array] + # @return [Array] required :summary, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseReasoningItem::Summary] } @@ -34,12 +34,12 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @return [Symbol, OpenAI::Responses::ResponseReasoningItem::Status, nil] + # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil] optional :status, enum: -> { OpenAI::Responses::ResponseReasoningItem::Status } # @!method initialize(id:, summary:, encrypted_content: nil, status: nil, type: :reasoning) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningItem} for more details. + # {OpenAI::Models::Responses::ResponseReasoningItem} for more details. # # A description of the chain of thought used by a reasoning model while generating # a response. Be sure to include these items in your `input` to the Responses API @@ -48,11 +48,11 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # # @param id [String] The unique identifier of the reasoning content. # - # @param summary [Array] Reasoning text contents. + # @param summary [Array] Reasoning text contents. 
# # @param encrypted_content [String, nil] The encrypted content of the reasoning item - populated when a response is # - # @param status [Symbol, OpenAI::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or + # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] The status of the item. One of `in_progress`, `completed`, or # # @param type [Symbol, :reasoning] The type of the object. Always `reasoning`. @@ -71,7 +71,7 @@ class Summary < OpenAI::Internal::Type::BaseModel # @!method initialize(text:, type: :summary_text) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningItem::Summary} for more details. + # {OpenAI::Models::Responses::ResponseReasoningItem::Summary} for more details. # # @param text [String] A short summary of the reasoning used by the model when generating # @@ -81,7 +81,7 @@ class Summary < OpenAI::Internal::Type::BaseModel # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # - # @see OpenAI::Responses::ResponseReasoningItem#status + # @see OpenAI::Models::Responses::ResponseReasoningItem#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/responses/response_reasoning_summary_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb new file mode 100644 index 00000000..f3299e9f --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_delta_event.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel + # @!attribute delta + # The partial update to the reasoning summary content. 
+ # + # @return [Object] + required :delta, OpenAI::Internal::Type::Unknown + + # @!attribute item_id + # The unique identifier of the item for which the reasoning summary is being + # updated. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute summary_index + # The index of the summary part within the output item. + # + # @return [Integer] + required :summary_index, Integer + + # @!attribute type + # The type of the event. Always 'response.reasoning_summary.delta'. + # + # @return [Symbol, :"response.reasoning_summary.delta"] + required :type, const: :"response.reasoning_summary.delta" + + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, summary_index:, type: :"response.reasoning_summary.delta") + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent} for more + # details. + # + # Emitted when there is a delta (partial update) to the reasoning summary content. + # + # @param delta [Object] The partial update to the reasoning summary content. + # + # @param item_id [String] The unique identifier of the item for which the reasoning summary is being updat + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. + # + # @param summary_index [Integer] The index of the summary part within the output item. + # + # @param type [Symbol, :"response.reasoning_summary.delta"] The type of the event. Always 'response.reasoning_summary.delta'. 
+ end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_summary_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_done_event.rb new file mode 100644 index 00000000..42716381 --- /dev/null +++ b/lib/openai/models/responses/response_reasoning_summary_done_event.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + # @!attribute item_id + # The unique identifier of the item for which the reasoning summary is finalized. + # + # @return [String] + required :item_id, String + + # @!attribute output_index + # The index of the output item in the response's output array. + # + # @return [Integer] + required :output_index, Integer + + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + + # @!attribute summary_index + # The index of the summary part within the output item. + # + # @return [Integer] + required :summary_index, Integer + + # @!attribute text + # The finalized reasoning summary text. + # + # @return [String] + required :text, String + + # @!attribute type + # The type of the event. Always 'response.reasoning_summary.done'. + # + # @return [Symbol, :"response.reasoning_summary.done"] + required :type, const: :"response.reasoning_summary.done" + + # @!method initialize(item_id:, output_index:, sequence_number:, summary_index:, text:, type: :"response.reasoning_summary.done") + # Emitted when the reasoning summary content is finalized for an item. + # + # @param item_id [String] The unique identifier of the item for which the reasoning summary is finalized. + # + # @param output_index [Integer] The index of the output item in the response's output array. + # + # @param sequence_number [Integer] The sequence number of this event. 
+ # + # @param summary_index [Integer] The index of the summary part within the output item. + # + # @param text [String] The finalized reasoning summary text. + # + # @param type [Symbol, :"response.reasoning_summary.done"] The type of the event. Always 'response.reasoning_summary.done'. + end + end + end +end diff --git a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb index 4701beaa..88b7f071 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_added_event.rb @@ -19,9 +19,15 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The summary part that was added. # - # @return [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] + # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,9 +40,10 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_part.added"] required :type, const: :"response.reasoning_summary_part.added" - # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.added") + # @!method initialize(item_id:, output_index:, part:, sequence_number:, summary_index:, type: :"response.reasoning_summary_part.added") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent} for more details. 
+ # {OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent} for more + # details. # # Emitted when a new reasoning summary part is added. # @@ -44,13 +51,15 @@ class ResponseReasoningSummaryPartAddedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary part is associated with. # - # @param part [OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent::Part] The summary part that was added. + # + # @param sequence_number [Integer] The sequence number of this event. # # @param summary_index [Integer] The index of the summary part within the reasoning summary. # # @param type [Symbol, :"response.reasoning_summary_part.added"] The type of the event. Always `response.reasoning_summary_part.added`. - # @see OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent#part + # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent#part class Part < OpenAI::Internal::Type::BaseModel # @!attribute text # The text of the summary part. diff --git a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb index 8b67b1ea..c45beaa4 100644 --- a/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_part_done_event.rb @@ -19,9 +19,15 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @!attribute part # The completed summary part. # - # @return [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] + # @return [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] required :part, -> { OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part } + # @!attribute sequence_number + # The sequence number of this event. 
+ # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,9 +40,10 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_part.done"] required :type, const: :"response.reasoning_summary_part.done" - # @!method initialize(item_id:, output_index:, part:, summary_index:, type: :"response.reasoning_summary_part.done") + # @!method initialize(item_id:, output_index:, part:, sequence_number:, summary_index:, type: :"response.reasoning_summary_part.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent} for more + # details. # # Emitted when a reasoning summary part is completed. # @@ -44,13 +51,15 @@ class ResponseReasoningSummaryPartDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary part is associated with. # - # @param part [OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. + # @param part [OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent::Part] The completed summary part. + # + # @param sequence_number [Integer] The sequence number of this event. # # @param summary_index [Integer] The index of the summary part within the reasoning summary. # # @param type [Symbol, :"response.reasoning_summary_part.done"] The type of the event. Always `response.reasoning_summary_part.done`. - # @see OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent#part + # @see OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent#part class Part < OpenAI::Internal::Type::BaseModel # @!attribute text # The text of the summary part. 
diff --git a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb index 2aaefaf3..9f801ed9 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_delta_event.rb @@ -22,6 +22,12 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,9 +40,10 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_text.delta"] required :type, const: :"response.reasoning_summary_text.delta" - # @!method initialize(delta:, item_id:, output_index:, summary_index:, type: :"response.reasoning_summary_text.delta") + # @!method initialize(delta:, item_id:, output_index:, sequence_number:, summary_index:, type: :"response.reasoning_summary_text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent} for more + # details. # # Emitted when a delta is added to a reasoning summary text. # @@ -46,6 +53,8 @@ class ResponseReasoningSummaryTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary text delta is associated with. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the reasoning summary. # # @param type [Symbol, :"response.reasoning_summary_text.delta"] The type of the event. 
Always `response.reasoning_summary_text.delta`. diff --git a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb index 5359b84a..d77661fc 100644 --- a/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb +++ b/lib/openai/models/responses/response_reasoning_summary_text_done_event.rb @@ -16,6 +16,12 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute summary_index # The index of the summary part within the reasoning summary. # @@ -34,9 +40,10 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.reasoning_summary_text.done"] required :type, const: :"response.reasoning_summary_text.done" - # @!method initialize(item_id:, output_index:, summary_index:, text:, type: :"response.reasoning_summary_text.done") + # @!method initialize(item_id:, output_index:, sequence_number:, summary_index:, text:, type: :"response.reasoning_summary_text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent} for more + # details. # # Emitted when a reasoning summary text is completed. # @@ -44,6 +51,8 @@ class ResponseReasoningSummaryTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item this summary text is associated with. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param summary_index [Integer] The index of the summary part within the reasoning summary. # # @param text [String] The full text of the completed reasoning summary. 
diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index aef8a0d5..f912ec9e 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -28,15 +28,21 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.refusal.delta`. # # @return [Symbol, :"response.refusal.delta"] required :type, const: :"response.refusal.delta" - # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta") + # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.refusal.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseRefusalDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseRefusalDeltaEvent} for more details. # # Emitted when there is a partial refusal text. # @@ -48,6 +54,8 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the refusal text is added to. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.refusal.delta"] The type of the event. Always `response.refusal.delta`. 
end end diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb index 912b1796..4d428943 100644 --- a/lib/openai/models/responses/response_refusal_done_event.rb +++ b/lib/openai/models/responses/response_refusal_done_event.rb @@ -28,15 +28,21 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # @return [String] required :refusal, String + # @!attribute sequence_number + # The sequence number of this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.refusal.done`. # # @return [Symbol, :"response.refusal.done"] required :type, const: :"response.refusal.done" - # @!method initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done") + # @!method initialize(content_index:, item_id:, output_index:, refusal:, sequence_number:, type: :"response.refusal.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseRefusalDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseRefusalDoneEvent} for more details. # # Emitted when refusal text is finalized. # @@ -48,6 +54,8 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # # @param refusal [String] The refusal text that is finalized. # + # @param sequence_number [Integer] The sequence number of this event. + # # @param type [Symbol, :"response.refusal.done"] The type of the event. Always `response.refusal.done`. end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 07638190..8063503f 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -12,14 +12,14 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # Additional fields to include in the response. 
See the `include` parameter for # Response creation above for more information. # - # @return [Array, nil] + # @return [Array, nil] optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Responses::ResponseIncludable] } # @!method initialize(include: nil, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Responses::ResponseRetrieveParams} for more details. # - # @param include [Array] Additional fields to include in the response. See the `include` + # @param include [Array] Additional fields to include in the response. See the `include` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index eb628952..20e16a85 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -4,13 +4,15 @@ module OpenAI module Models module Responses # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. module ResponseStatus extend OpenAI::Internal::Type::Enum COMPLETED = :completed FAILED = :failed IN_PROGRESS = :in_progress + CANCELLED = :cancelled + QUEUED = :queued INCOMPLETE = :incomplete # @!method self.values diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index 4f6c6c4a..eb98e953 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -116,10 +116,6 @@ module ResponseStreamEvent # Emitted when refusal text is finalized. variant :"response.refusal.done", -> { OpenAI::Responses::ResponseRefusalDoneEvent } - # Emitted when a text annotation is added. 
- variant :"response.output_text.annotation.added", - -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent } - # Emitted when there is an additional text delta. variant :"response.output_text.delta", -> { OpenAI::Responses::ResponseTextDeltaEvent } @@ -138,51 +134,83 @@ module ResponseStreamEvent variant :"response.web_search_call.searching", -> { OpenAI::Responses::ResponseWebSearchCallSearchingEvent } + # Emitted when an image generation tool call has completed and the final image is available. + variant :"response.image_generation_call.completed", + -> { OpenAI::Responses::ResponseImageGenCallCompletedEvent } + + # Emitted when an image generation tool call is actively generating an image (intermediate state). + variant :"response.image_generation_call.generating", + -> { OpenAI::Responses::ResponseImageGenCallGeneratingEvent } + + # Emitted when an image generation tool call is in progress. + variant :"response.image_generation_call.in_progress", + -> { OpenAI::Responses::ResponseImageGenCallInProgressEvent } + + # Emitted when a partial image is available during image generation streaming. + variant :"response.image_generation_call.partial_image", + -> { OpenAI::Responses::ResponseImageGenCallPartialImageEvent } + + # Emitted when there is a delta (partial update) to the arguments of an MCP tool call. + variant :"response.mcp_call.arguments_delta", + -> { + OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent + } + + # Emitted when the arguments for an MCP tool call are finalized. + variant :"response.mcp_call.arguments_done", + -> { + OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent + } + + # Emitted when an MCP tool call has completed successfully. + variant :"response.mcp_call.completed", -> { OpenAI::Responses::ResponseMcpCallCompletedEvent } + + # Emitted when an MCP tool call has failed. + variant :"response.mcp_call.failed", -> { OpenAI::Responses::ResponseMcpCallFailedEvent } + + # Emitted when an MCP tool call is in progress. 
+ variant :"response.mcp_call.in_progress", -> { OpenAI::Responses::ResponseMcpCallInProgressEvent } + + # Emitted when the list of available MCP tools has been successfully retrieved. + variant :"response.mcp_list_tools.completed", + -> { + OpenAI::Responses::ResponseMcpListToolsCompletedEvent + } + + # Emitted when the attempt to list available MCP tools has failed. + variant :"response.mcp_list_tools.failed", -> { OpenAI::Responses::ResponseMcpListToolsFailedEvent } + + # Emitted when the system is in the process of retrieving the list of available MCP tools. + variant :"response.mcp_list_tools.in_progress", + -> { OpenAI::Responses::ResponseMcpListToolsInProgressEvent } + + # Emitted when an annotation is added to output text content. + variant :"response.output_text_annotation.added", + -> { OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent } + + # Emitted when a response is queued and waiting to be processed. + variant :"response.queued", -> { OpenAI::Responses::ResponseQueuedEvent } + + # Emitted when there is a delta (partial update) to the reasoning content. + variant :"response.reasoning.delta", -> { OpenAI::Responses::ResponseReasoningDeltaEvent } + + # Emitted when the reasoning content is finalized for an item. + variant :"response.reasoning.done", -> { OpenAI::Responses::ResponseReasoningDoneEvent } + + # Emitted when there is a delta (partial update) to the reasoning summary content. + variant :"response.reasoning_summary.delta", + -> { + OpenAI::Responses::ResponseReasoningSummaryDeltaEvent + } + + # Emitted when the reasoning summary content is finalized for an item. 
+ variant :"response.reasoning_summary.done", + -> { + OpenAI::Responses::ResponseReasoningSummaryDoneEvent + } + # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseAudioDeltaEvent, OpenAI::Responses::ResponseAudioDoneEvent, OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Responses::ResponseCompletedEvent, OpenAI::Responses::ResponseContentPartAddedEvent, OpenAI::Responses::ResponseContentPartDoneEvent, OpenAI::Responses::ResponseCreatedEvent, OpenAI::Responses::ResponseErrorEvent, OpenAI::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Responses::ResponseInProgressEvent, OpenAI::Responses::ResponseFailedEvent, OpenAI::Responses::ResponseIncompleteEvent, OpenAI::Responses::ResponseOutputItemAddedEvent, OpenAI::Responses::ResponseOutputItemDoneEvent, OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, 
OpenAI::Responses::ResponseWebSearchCallSearchingEvent)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseAudioDeltaEvent, - OpenAI::Responses::ResponseAudioDoneEvent, - OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, - OpenAI::Responses::ResponseAudioTranscriptDoneEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, - OpenAI::Responses::ResponseCompletedEvent, - OpenAI::Responses::ResponseContentPartAddedEvent, - OpenAI::Responses::ResponseContentPartDoneEvent, - OpenAI::Responses::ResponseCreatedEvent, - OpenAI::Responses::ResponseErrorEvent, - OpenAI::Responses::ResponseFileSearchCallCompletedEvent, - OpenAI::Responses::ResponseFileSearchCallInProgressEvent, - OpenAI::Responses::ResponseFileSearchCallSearchingEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, - OpenAI::Responses::ResponseInProgressEvent, - OpenAI::Responses::ResponseFailedEvent, - OpenAI::Responses::ResponseIncompleteEvent, - OpenAI::Responses::ResponseOutputItemAddedEvent, - OpenAI::Responses::ResponseOutputItemDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, - OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, - OpenAI::Responses::ResponseRefusalDeltaEvent, - OpenAI::Responses::ResponseRefusalDoneEvent, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Responses::ResponseTextDeltaEvent, - OpenAI::Responses::ResponseTextDoneEvent, - OpenAI::Responses::ResponseWebSearchCallCompletedEvent, - 
OpenAI::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Responses::ResponseWebSearchCallSearchingEvent - ) - end - end + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartAddedEvent, OpenAI::Models::Responses::ResponseReasoningSummaryPartDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, 
OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseImageGenCallCompletedEvent, OpenAI::Models::Responses::ResponseImageGenCallGeneratingEvent, OpenAI::Models::Responses::ResponseImageGenCallInProgressEvent, OpenAI::Models::Responses::ResponseImageGenCallPartialImageEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseMcpCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseMcpCallCompletedEvent, OpenAI::Models::Responses::ResponseMcpCallFailedEvent, OpenAI::Models::Responses::ResponseMcpCallInProgressEvent, OpenAI::Models::Responses::ResponseMcpListToolsCompletedEvent, OpenAI::Models::Responses::ResponseMcpListToolsFailedEvent, OpenAI::Models::Responses::ResponseMcpListToolsInProgressEvent, OpenAI::Models::Responses::ResponseOutputTextAnnotationAddedEvent, OpenAI::Models::Responses::ResponseQueuedEvent, OpenAI::Models::Responses::ResponseReasoningDeltaEvent, OpenAI::Models::Responses::ResponseReasoningDoneEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDeltaEvent, OpenAI::Models::Responses::ResponseReasoningSummaryDoneEvent)] end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb deleted file mode 100644 index f8cc77c2..00000000 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ /dev/null @@ -1,205 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - module Responses - class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - # @!attribute annotation - # A citation to a file. 
- # - # @return [OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - required :annotation, union: -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation } - - # @!attribute annotation_index - # The index of the annotation that was added. - # - # @return [Integer] - required :annotation_index, Integer - - # @!attribute content_index - # The index of the content part that the text annotation was added to. - # - # @return [Integer] - required :content_index, Integer - - # @!attribute item_id - # The ID of the output item that the text annotation was added to. - # - # @return [String] - required :item_id, String - - # @!attribute output_index - # The index of the output item that the text annotation was added to. - # - # @return [Integer] - required :output_index, Integer - - # @!attribute type - # The type of the event. Always `response.output_text.annotation.added`. - # - # @return [Symbol, :"response.output_text.annotation.added"] - required :type, const: :"response.output_text.annotation.added" - - # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text.annotation.added") - # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextAnnotationDeltaEvent} for more details. - # - # Emitted when a text annotation is added. - # - # @param annotation [OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] A citation to a file. - # - # @param annotation_index [Integer] The index of the annotation that was added. 
- # - # @param content_index [Integer] The index of the content part that the text annotation was added to. - # - # @param item_id [String] The ID of the output item that the text annotation was added to. - # - # @param output_index [Integer] The index of the output item that the text annotation was added to. - # - # @param type [Symbol, :"response.output_text.annotation.added"] The type of the event. Always `response.output_text.annotation.added`. - - # A citation to a file. - # - # @see OpenAI::Responses::ResponseTextAnnotationDeltaEvent#annotation - module Annotation - extend OpenAI::Internal::Type::Union - - discriminator :type - - # A citation to a file. - variant :file_citation, - -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation } - - # A citation for a web resource used to generate a model response. - variant :url_citation, - -> { - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation - } - - # A path to a file. - variant :file_path, -> { OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath } - - class FileCitation < OpenAI::Internal::Type::BaseModel - # @!attribute file_id - # The ID of the file. - # - # @return [String] - required :file_id, String - - # @!attribute index - # The index of the file in the list of files. - # - # @return [Integer] - required :index, Integer - - # @!attribute type - # The type of the file citation. Always `file_citation`. - # - # @return [Symbol, :file_citation] - required :type, const: :file_citation - - # @!method initialize(file_id:, index:, type: :file_citation) - # A citation to a file. - # - # @param file_id [String] The ID of the file. - # - # @param index [Integer] The index of the file in the list of files. - # - # @param type [Symbol, :file_citation] The type of the file citation. Always `file_citation`. 
- end - - class URLCitation < OpenAI::Internal::Type::BaseModel - # @!attribute end_index - # The index of the last character of the URL citation in the message. - # - # @return [Integer] - required :end_index, Integer - - # @!attribute start_index - # The index of the first character of the URL citation in the message. - # - # @return [Integer] - required :start_index, Integer - - # @!attribute title - # The title of the web resource. - # - # @return [String] - required :title, String - - # @!attribute type - # The type of the URL citation. Always `url_citation`. - # - # @return [Symbol, :url_citation] - required :type, const: :url_citation - - # @!attribute url - # The URL of the web resource. - # - # @return [String] - required :url, String - - # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) - # A citation for a web resource used to generate a model response. - # - # @param end_index [Integer] The index of the last character of the URL citation in the message. - # - # @param start_index [Integer] The index of the first character of the URL citation in the message. - # - # @param title [String] The title of the web resource. - # - # @param url [String] The URL of the web resource. - # - # @param type [Symbol, :url_citation] The type of the URL citation. Always `url_citation`. - end - - class FilePath < OpenAI::Internal::Type::BaseModel - # @!attribute file_id - # The ID of the file. - # - # @return [String] - required :file_id, String - - # @!attribute index - # The index of the file in the list of files. - # - # @return [Integer] - required :index, Integer - - # @!attribute type - # The type of the file path. Always `file_path`. - # - # @return [Symbol, :file_path] - required :type, const: :file_path - - # @!method initialize(file_id:, index:, type: :file_path) - # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath} for - # more details. 
- # - # A path to a file. - # - # @param file_id [String] The ID of the file. - # - # @param index [Integer] The index of the file in the list of files. - # - # @param type [Symbol, :file_path] The type of the file path. Always `file_path`. - end - - # @!method self.variants - # @return [Array(OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - end - end - end - end - end - end -end diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 2d22f544..9a203e84 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -19,12 +19,12 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # ensures the message the model generates is valid JSON. Using `json_schema` is # preferred for models that support it. # - # @return [OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject, nil] + # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] optional :format_, union: -> { OpenAI::Responses::ResponseFormatTextConfig }, api_name: :format # @!method initialize(format_: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextConfig} for more details. + # {OpenAI::Models::Responses::ResponseTextConfig} for more details. 
# # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: @@ -32,7 +32,7 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) # - # @param format_ [OpenAI::ResponseFormatText, OpenAI::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 48c639ef..b08caebc 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -28,15 +28,21 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.output_text.delta`. # # @return [Symbol, :"response.output_text.delta"] required :type, const: :"response.output_text.delta" - # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta") + # @!method initialize(content_index:, delta:, item_id:, output_index:, sequence_number:, type: :"response.output_text.delta") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextDeltaEvent} for more details. + # {OpenAI::Models::Responses::ResponseTextDeltaEvent} for more details. 
# # Emitted when there is an additional text delta. # @@ -48,6 +54,8 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the text delta was added to. # + # @param sequence_number [Integer] The sequence number for this event. + # # @param type [Symbol, :"response.output_text.delta"] The type of the event. Always `response.output_text.delta`. end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 7b8921d4..319b4eeb 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -22,6 +22,12 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number for this event. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute text # The text content that is finalized. # @@ -34,9 +40,9 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.done"] required :type, const: :"response.output_text.done" - # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done") + # @!method initialize(content_index:, item_id:, output_index:, sequence_number:, text:, type: :"response.output_text.done") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseTextDoneEvent} for more details. + # {OpenAI::Models::Responses::ResponseTextDoneEvent} for more details. # # Emitted when text content is finalized. # @@ -46,6 +52,8 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the text content is finalized. # + # @param sequence_number [Integer] The sequence number for this event. 
+ # # @param text [String] The text content that is finalized. # # @param type [Symbol, :"response.output_text.done"] The type of the event. Always `response.output_text.done`. diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 09831893..bd574dfa 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -13,7 +13,7 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!attribute input_tokens_details # A detailed breakdown of the input tokens. # - # @return [OpenAI::Responses::ResponseUsage::InputTokensDetails] + # @return [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] required :input_tokens_details, -> { OpenAI::Responses::ResponseUsage::InputTokensDetails } # @!attribute output_tokens @@ -25,7 +25,7 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @!attribute output_tokens_details # A detailed breakdown of the output tokens. # - # @return [OpenAI::Responses::ResponseUsage::OutputTokensDetails] + # @return [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] required :output_tokens_details, -> { OpenAI::Responses::ResponseUsage::OutputTokensDetails } # @!attribute total_tokens @@ -40,15 +40,15 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # # @param input_tokens [Integer] The number of input tokens. # - # @param input_tokens_details [OpenAI::Responses::ResponseUsage::InputTokensDetails] A detailed breakdown of the input tokens. + # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] A detailed breakdown of the input tokens. # # @param output_tokens [Integer] The number of output tokens. # - # @param output_tokens_details [OpenAI::Responses::ResponseUsage::OutputTokensDetails] A detailed breakdown of the output tokens. 
+ # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] A detailed breakdown of the output tokens. # # @param total_tokens [Integer] The total number of tokens used. - # @see OpenAI::Responses::ResponseUsage#input_tokens_details + # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute cached_tokens # The number of tokens that were retrieved from the cache. @@ -59,14 +59,14 @@ class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @!method initialize(cached_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseUsage::InputTokensDetails} for more details. + # {OpenAI::Models::Responses::ResponseUsage::InputTokensDetails} for more details. # # A detailed breakdown of the input tokens. # # @param cached_tokens [Integer] The number of tokens that were retrieved from the cache. end - # @see OpenAI::Responses::ResponseUsage#output_tokens_details + # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details class OutputTokensDetails < OpenAI::Internal::Type::BaseModel # @!attribute reasoning_tokens # The number of reasoning tokens. diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb index 4a8461ac..91ae2f37 100644 --- a/lib/openai/models/responses/response_web_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb @@ -16,15 +16,22 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of the web search call being processed. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.web_search_call.completed`. 
# # @return [Symbol, :"response.web_search_call.completed"] required :type, const: :"response.web_search_call.completed" - # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.completed") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.web_search_call.completed") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseWebSearchCallCompletedEvent} for more details. + # {OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent} for more + # details. # # Emitted when a web search call is completed. # @@ -32,6 +39,8 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the web search call is associated with. # + # @param sequence_number [Integer] The sequence number of the web search call being processed. + # # @param type [Symbol, :"response.web_search_call.completed"] The type of the event. Always `response.web_search_call.completed`. end end diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb index a930db0f..f00f81c0 100644 --- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb @@ -16,15 +16,22 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of the web search call being processed. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.web_search_call.in_progress`. 
# # @return [Symbol, :"response.web_search_call.in_progress"] required :type, const: :"response.web_search_call.in_progress" - # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.web_search_call.in_progress") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseWebSearchCallInProgressEvent} for more details. + # {OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent} for more + # details. # # Emitted when a web search call is initiated. # @@ -32,6 +39,8 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the web search call is associated with. # + # @param sequence_number [Integer] The sequence number of the web search call being processed. + # # @param type [Symbol, :"response.web_search_call.in_progress"] The type of the event. Always `response.web_search_call.in_progress`. end end diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb index 31a1e8c1..d1552a07 100644 --- a/lib/openai/models/responses/response_web_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb @@ -16,15 +16,22 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Integer] required :output_index, Integer + # @!attribute sequence_number + # The sequence number of the web search call being processed. + # + # @return [Integer] + required :sequence_number, Integer + # @!attribute type # The type of the event. Always `response.web_search_call.searching`. 
# # @return [Symbol, :"response.web_search_call.searching"] required :type, const: :"response.web_search_call.searching" - # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.searching") + # @!method initialize(item_id:, output_index:, sequence_number:, type: :"response.web_search_call.searching") # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ResponseWebSearchCallSearchingEvent} for more details. + # {OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent} for more + # details. # # Emitted when a web search call is executing. # @@ -32,6 +39,8 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # # @param output_index [Integer] The index of the output item that the web search call is associated with. # + # @param sequence_number [Integer] The sequence number of the web search call being processed. + # # @param type [Symbol, :"response.web_search_call.searching"] The type of the event. Always `response.web_search_call.searching`. end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index 2d2c6702..f97fae7e 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -9,31 +9,501 @@ module Tool discriminator :type - # A tool that searches for relevant content from uploaded files. Learn more about the [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - variant :file_search, -> { OpenAI::Responses::FileSearchTool } - # Defines a function in your own code the model can choose to call. Learn more about [function calling](https://platform.openai.com/docs/guides/function-calling). variant :function, -> { OpenAI::Responses::FunctionTool } + # A tool that searches for relevant content from uploaded files. Learn more about the [file search tool](https://platform.openai.com/docs/guides/tools-file-search). 
+ variant :file_search, -> { OpenAI::Responses::FileSearchTool } + # A tool that controls a virtual computer. Learn more about the [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). variant :computer_use_preview, -> { OpenAI::Responses::ComputerTool } + # Give the model access to additional tools via remote Model Context Protocol + # (MCP) servers. [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp). + variant :mcp, -> { OpenAI::Responses::Tool::Mcp } + + # A tool that runs Python code to help generate a response to a prompt. + variant :code_interpreter, -> { OpenAI::Responses::Tool::CodeInterpreter } + + # A tool that generates images using a model like `gpt-image-1`. + variant :image_generation, -> { OpenAI::Responses::Tool::ImageGeneration } + + # A tool that allows the model to execute shell commands in a local environment. + variant :local_shell, -> { OpenAI::Responses::Tool::LocalShell } + # This tool searches the web for relevant results to use in a response. Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). variant -> { OpenAI::Responses::WebSearchTool } - # @!method self.variants - # @return [Array(OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, OpenAI::Responses::ComputerTool, OpenAI::Responses::WebSearchTool)] + class Mcp < OpenAI::Internal::Type::BaseModel + # @!attribute server_label + # A label for this MCP server, used to identify it in tool calls. + # + # @return [String] + required :server_label, String + + # @!attribute server_url + # The URL for the MCP server. + # + # @return [String] + required :server_url, String + + # @!attribute type + # The type of the MCP tool. Always `mcp`. + # + # @return [Symbol, :mcp] + required :type, const: :mcp + + # @!attribute allowed_tools + # List of allowed tool names or a filter object. 
+ # + # @return [Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil] + optional :allowed_tools, union: -> { OpenAI::Responses::Tool::Mcp::AllowedTools }, nil?: true + + # @!attribute headers + # Optional HTTP headers to send to the MCP server. Use for authentication or other + # purposes. + # + # @return [Hash{Symbol=>String}, nil] + optional :headers, OpenAI::Internal::Type::HashOf[String], nil?: true + + # @!attribute require_approval + # Specify which of the MCP server's tools require approval. + # + # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil] + optional :require_approval, union: -> { OpenAI::Responses::Tool::Mcp::RequireApproval }, nil?: true + + # @!method initialize(server_label:, server_url:, allowed_tools: nil, headers: nil, require_approval: nil, type: :mcp) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::Mcp} for more details. + # + # Give the model access to additional tools via remote Model Context Protocol + # (MCP) servers. + # [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp). + # + # @param server_label [String] A label for this MCP server, used to identify it in tool calls. + # + # @param server_url [String] The URL for the MCP server. + # + # @param allowed_tools [Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, nil] List of allowed tool names or a filter object. + # + # @param headers [Hash{Symbol=>String}, nil] Optional HTTP headers to send to the MCP server. Use for authentication + # + # @param require_approval [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting, nil] Specify which of the MCP server's tools require approval. 
+ # + # @param type [Symbol, :mcp] The type of the MCP tool. Always `mcp`. + + # List of allowed tool names or a filter object. + # + # @see OpenAI::Models::Responses::Tool::Mcp#allowed_tools + module AllowedTools + extend OpenAI::Internal::Type::Union + + # A string array of allowed tool names + variant -> { OpenAI::Models::Responses::Tool::Mcp::AllowedTools::StringArray } + + # A filter object to specify which tools are allowed. + variant -> { OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter } + + class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel + # @!attribute tool_names + # List of allowed tool names. + # + # @return [Array, nil] + optional :tool_names, OpenAI::Internal::Type::ArrayOf[String] + + # @!method initialize(tool_names: nil) + # A filter object to specify which tools are allowed. + # + # @param tool_names [Array] List of allowed tool names. + end + + # @!method self.variants + # @return [Array(Array, OpenAI::Models::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter)] + + # @type [OpenAI::Internal::Type::Converter] + StringArray = OpenAI::Internal::Type::ArrayOf[String] + end + + # Specify which of the MCP server's tools require approval. + # + # @see OpenAI::Models::Responses::Tool::Mcp#require_approval + module RequireApproval + extend OpenAI::Internal::Type::Union + + variant -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter } + + # Specify a single approval policy for all tools. One of `always` or + # `never`. When set to `always`, all tools will require approval. When + # set to `never`, all tools will not require approval. + variant enum: -> { OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting } + + class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel + # @!attribute always + # A list of tools that always require approval. 
+ # + # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, nil] + optional :always, + -> { + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always + } + + # @!attribute never + # A list of tools that never require approval. + # + # @return [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, nil] + optional :never, + -> { + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + } + + # @!method initialize(always: nil, never: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter} + # for more details. + # + # @param always [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always] A list of tools that always require approval. + # + # @param never [OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never] A list of tools that never require approval. + + # @see OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#always + class Always < OpenAI::Internal::Type::BaseModel + # @!attribute tool_names + # List of tools that require approval. + # + # @return [Array, nil] + optional :tool_names, OpenAI::Internal::Type::ArrayOf[String] + + # @!method initialize(tool_names: nil) + # A list of tools that always require approval. + # + # @param tool_names [Array] List of tools that require approval. + end + + # @see OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter#never + class Never < OpenAI::Internal::Type::BaseModel + # @!attribute tool_names + # List of tools that do not require approval. + # + # @return [Array, nil] + optional :tool_names, OpenAI::Internal::Type::ArrayOf[String] + + # @!method initialize(tool_names: nil) + # A list of tools that never require approval. + # + # @param tool_names [Array] List of tools that do not require approval. 
+ end + end + + # Specify a single approval policy for all tools. One of `always` or `never`. When + # set to `always`, all tools will require approval. When set to `never`, all tools + # will not require approval. + module McpToolApprovalSetting + extend OpenAI::Internal::Type::Enum - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - OpenAI::Responses::FileSearchTool, - OpenAI::Responses::FunctionTool, - OpenAI::Responses::ComputerTool, - OpenAI::Responses::WebSearchTool - ) + ALWAYS = :always + NEVER = :never + + # @!method self.values + # @return [Array] + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, Symbol, OpenAI::Models::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting)] end end + + class CodeInterpreter < OpenAI::Internal::Type::BaseModel + # @!attribute container + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + # + # @return [String, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto] + required :container, union: -> { OpenAI::Responses::Tool::CodeInterpreter::Container } + + # @!attribute type + # The type of the code interpreter tool. Always `code_interpreter`. + # + # @return [Symbol, :code_interpreter] + required :type, const: :code_interpreter + + # @!method initialize(container:, type: :code_interpreter) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::CodeInterpreter} for more details. + # + # A tool that runs Python code to help generate a response to a prompt. + # + # @param container [String, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto] The code interpreter container. Can be a container ID or an object that + # + # @param type [Symbol, :code_interpreter] The type of the code interpreter tool. Always `code_interpreter`. 
+ + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + # + # @see OpenAI::Models::Responses::Tool::CodeInterpreter#container + module Container + extend OpenAI::Internal::Type::Union + + # The container ID. + variant String + + # Configuration for a code interpreter container. Optionally specify the IDs + # of the files to run the code on. + variant -> { OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto } + + class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel + # @!attribute type + # Always `auto`. + # + # @return [Symbol, :auto] + required :type, const: :auto + + # @!attribute file_ids + # An optional list of uploaded files to make available to your code. + # + # @return [Array, nil] + optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] + + # @!method initialize(file_ids: nil, type: :auto) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto} + # for more details. + # + # Configuration for a code interpreter container. Optionally specify the IDs of + # the files to run the code on. + # + # @param file_ids [Array] An optional list of uploaded files to make available to your code. + # + # @param type [Symbol, :auto] Always `auto`. + end + + # @!method self.variants + # @return [Array(String, OpenAI::Models::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto)] + end + end + + class ImageGeneration < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the image generation tool. Always `image_generation`. + # + # @return [Symbol, :image_generation] + required :type, const: :image_generation + + # @!attribute background + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. 
+ # + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Background, nil] + optional :background, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Background } + + # @!attribute input_image_mask + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + # + # @return [OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask, nil] + optional :input_image_mask, -> { OpenAI::Responses::Tool::ImageGeneration::InputImageMask } + + # @!attribute model + # The image generation model to use. Default: `gpt-image-1`. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Model, nil] + optional :model, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Model } + + # @!attribute moderation + # Moderation level for the generated image. Default: `auto`. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Moderation, nil] + optional :moderation, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Moderation } + + # @!attribute output_compression + # Compression level for the output image. Default: 100. + # + # @return [Integer, nil] + optional :output_compression, Integer + + # @!attribute output_format + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::OutputFormat, nil] + optional :output_format, enum: -> { OpenAI::Responses::Tool::ImageGeneration::OutputFormat } + + # @!attribute partial_images + # Number of partial images to generate in streaming mode, from 0 (default value) + # to 3. + # + # @return [Integer, nil] + optional :partial_images, Integer + + # @!attribute quality + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. 
+ # + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Quality, nil] + optional :quality, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Quality } + + # @!attribute size + # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. + # + # @return [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Size, nil] + optional :size, enum: -> { OpenAI::Responses::Tool::ImageGeneration::Size } + + # @!method initialize(background: nil, input_image_mask: nil, model: nil, moderation: nil, output_compression: nil, output_format: nil, partial_images: nil, quality: nil, size: nil, type: :image_generation) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::ImageGeneration} for more details. + # + # A tool that generates images using a model like `gpt-image-1`. + # + # @param background [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Background] Background type for the generated image. One of `transparent`, + # + # @param input_image_mask [OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask] Optional mask for inpainting. Contains `image_url` + # + # @param model [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Model] The image generation model to use. Default: `gpt-image-1`. + # + # @param moderation [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Moderation] Moderation level for the generated image. Default: `auto`. + # + # @param output_compression [Integer] Compression level for the output image. Default: 100. + # + # @param output_format [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::OutputFormat] The output format of the generated image. 
One of `png`, `webp`, or + # + # @param partial_images [Integer] Number of partial images to generate in streaming mode, from 0 (default value) t + # + # @param quality [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Quality] The quality of the generated image. One of `low`, `medium`, `high`, + # + # @param size [Symbol, OpenAI::Models::Responses::Tool::ImageGeneration::Size] The size of the generated image. One of `1024x1024`, `1024x1536`, + # + # @param type [Symbol, :image_generation] The type of the image generation tool. Always `image_generation`. + + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. + # + # @see OpenAI::Models::Responses::Tool::ImageGeneration#background + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT = :transparent + OPAQUE = :opaque + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # @see OpenAI::Models::Responses::Tool::ImageGeneration#input_image_mask + class InputImageMask < OpenAI::Internal::Type::BaseModel + # @!attribute file_id + # File ID for the mask image. + # + # @return [String, nil] + optional :file_id, String + + # @!attribute image_url + # Base64-encoded mask image. + # + # @return [String, nil] + optional :image_url, String + + # @!method initialize(file_id: nil, image_url: nil) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Responses::Tool::ImageGeneration::InputImageMask} for more + # details. + # + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + # + # @param file_id [String] File ID for the mask image. + # + # @param image_url [String] Base64-encoded mask image. + end + + # The image generation model to use. Default: `gpt-image-1`. 
+ # + # @see OpenAI::Models::Responses::Tool::ImageGeneration#model + module Model + extend OpenAI::Internal::Type::Enum + + GPT_IMAGE_1 = :"gpt-image-1" + + # @!method self.values + # @return [Array] + end + + # Moderation level for the generated image. Default: `auto`. + # + # @see OpenAI::Models::Responses::Tool::ImageGeneration#moderation + module Moderation + extend OpenAI::Internal::Type::Enum + + AUTO = :auto + LOW = :low + + # @!method self.values + # @return [Array] + end + + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. + # + # @see OpenAI::Models::Responses::Tool::ImageGeneration#output_format + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG = :png + WEBP = :webp + JPEG = :jpeg + + # @!method self.values + # @return [Array] + end + + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. + # + # @see OpenAI::Models::Responses::Tool::ImageGeneration#quality + module Quality + extend OpenAI::Internal::Type::Enum + + LOW = :low + MEDIUM = :medium + HIGH = :high + AUTO = :auto + + # @!method self.values + # @return [Array] + end + + # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. + # + # @see OpenAI::Models::Responses::Tool::ImageGeneration#size + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024 = :"1024x1024" + SIZE_1024X1536 = :"1024x1536" + SIZE_1536X1024 = :"1536x1024" + AUTO = :auto + + # @!method self.values + # @return [Array] + end + end + + class LocalShell < OpenAI::Internal::Type::BaseModel + # @!attribute type + # The type of the local shell tool. Always `local_shell`. + # + # @return [Symbol, :local_shell] + required :type, const: :local_shell + + # @!method initialize(type: :local_shell) + # A tool that allows the model to execute shell commands in a local environment. + # + # @param type [Symbol, :local_shell] The type of the local shell tool. 
Always `local_shell`. + end + + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::Tool::Mcp, OpenAI::Models::Responses::Tool::CodeInterpreter, OpenAI::Models::Responses::Tool::ImageGeneration, OpenAI::Models::Responses::Tool::LocalShell, OpenAI::Models::Responses::WebSearchTool)] end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index 504daa0a..ef4278cd 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -13,18 +13,21 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` # - # @return [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] + # @return [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] required :type, enum: -> { OpenAI::Responses::ToolChoiceTypes::Type } # @!method initialize(type:) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::ToolChoiceTypes} for more details. + # {OpenAI::Models::Responses::ToolChoiceTypes} for more details. # # Indicates that the model should use a built-in tool to generate a response. # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). # - # @param type [Symbol, OpenAI::Responses::ToolChoiceTypes::Type] The type of hosted tool the model should to use. Learn more about + # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] The type of hosted tool the model should to use. Learn more about # The type of hosted tool the model should to use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). 
@@ -34,8 +37,11 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` # - # @see OpenAI::Responses::ToolChoiceTypes#type + # @see OpenAI::Models::Responses::ToolChoiceTypes#type module Type extend OpenAI::Internal::Type::Enum @@ -43,6 +49,9 @@ module Type WEB_SEARCH_PREVIEW = :web_search_preview COMPUTER_USE_PREVIEW = :computer_use_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 + IMAGE_GENERATION = :image_generation + CODE_INTERPRETER = :code_interpreter + MCP = :mcp # @!method self.values # @return [Array] diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 37dd39fe..4dd6b2ec 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -8,40 +8,40 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. # - # @return [Symbol, OpenAI::Responses::WebSearchTool::Type] + # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] required :type, enum: -> { OpenAI::Responses::WebSearchTool::Type } # @!attribute search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # - # @return [Symbol, OpenAI::Responses::WebSearchTool::SearchContextSize, nil] + # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize, nil] optional :search_context_size, enum: -> { OpenAI::Responses::WebSearchTool::SearchContextSize } # @!attribute user_location # The user's location. 
# - # @return [OpenAI::Responses::WebSearchTool::UserLocation, nil] + # @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] optional :user_location, -> { OpenAI::Responses::WebSearchTool::UserLocation }, nil?: true # @!method initialize(type:, search_context_size: nil, user_location: nil) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::WebSearchTool} for more details. + # {OpenAI::Models::Responses::WebSearchTool} for more details. # # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). # - # @param type [Symbol, OpenAI::Responses::WebSearchTool::Type] The type of the web search tool. One of `web_search_preview` or `web_search_prev + # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] The type of the web search tool. One of `web_search_preview` or `web_search_prev # - # @param search_context_size [Symbol, OpenAI::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search + # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] High level guidance for the amount of context window space to use for the search # - # @param user_location [OpenAI::Responses::WebSearchTool::UserLocation, nil] The user's location. + # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] The user's location. # The type of the web search tool. One of `web_search_preview` or # `web_search_preview_2025_03_11`. # - # @see OpenAI::Responses::WebSearchTool#type + # @see OpenAI::Models::Responses::WebSearchTool#type module Type extend OpenAI::Internal::Type::Enum @@ -55,7 +55,7 @@ module Type # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. 
# - # @see OpenAI::Responses::WebSearchTool#search_context_size + # @see OpenAI::Models::Responses::WebSearchTool#search_context_size module SearchContextSize extend OpenAI::Internal::Type::Enum @@ -67,7 +67,7 @@ module SearchContextSize # @return [Array] end - # @see OpenAI::Responses::WebSearchTool#user_location + # @see OpenAI::Models::Responses::WebSearchTool#user_location class UserLocation < OpenAI::Internal::Type::BaseModel # @!attribute type # The type of location approximation. Always `approximate`. @@ -103,7 +103,7 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) # Some parameter documentations has been truncated, see - # {OpenAI::Responses::WebSearchTool::UserLocation} for more details. + # {OpenAI::Models::Responses::WebSearchTool::UserLocation} for more details. # # The user's location. # diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index a35d5278..68b8b32e 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -24,17 +24,7 @@ module ResponsesOnlyModel end # @!method self.variants - # @return [Array(String, Symbol, OpenAI::ChatModel, Symbol, OpenAI::ResponsesModel::ResponsesOnlyModel)] - - define_sorbet_constant!(:Variants) do - T.type_alias do - T.any( - String, - OpenAI::ChatModel::TaggedSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol - ) - end - end + # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index 30388ebb..ac676b5d 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -20,7 +20,7 @@ class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # @!method 
initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) # Some parameter documentations has been truncated, see - # {OpenAI::StaticFileChunkingStrategy} for more details. + # {OpenAI::Models::StaticFileChunkingStrategy} for more details. # # @param chunk_overlap_tokens [Integer] The number of tokens that overlap between chunks. The default value is `400`. # diff --git a/lib/openai/models/static_file_chunking_strategy_object.rb b/lib/openai/models/static_file_chunking_strategy_object.rb index 2169aa9f..43c0f303 100644 --- a/lib/openai/models/static_file_chunking_strategy_object.rb +++ b/lib/openai/models/static_file_chunking_strategy_object.rb @@ -5,7 +5,7 @@ module Models class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::StaticFileChunkingStrategy] + # @return [OpenAI::Models::StaticFileChunkingStrategy] required :static, -> { OpenAI::StaticFileChunkingStrategy } # @!attribute type @@ -15,7 +15,7 @@ class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel required :type, const: :static # @!method initialize(static:, type: :static) - # @param static [OpenAI::StaticFileChunkingStrategy] + # @param static [OpenAI::Models::StaticFileChunkingStrategy] # # @param type [Symbol, :static] Always `static`. 
end diff --git a/lib/openai/models/static_file_chunking_strategy_object_param.rb b/lib/openai/models/static_file_chunking_strategy_object_param.rb index 304bacb5..3368e144 100644 --- a/lib/openai/models/static_file_chunking_strategy_object_param.rb +++ b/lib/openai/models/static_file_chunking_strategy_object_param.rb @@ -5,7 +5,7 @@ module Models class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @!attribute static # - # @return [OpenAI::StaticFileChunkingStrategy] + # @return [OpenAI::Models::StaticFileChunkingStrategy] required :static, -> { OpenAI::StaticFileChunkingStrategy } # @!attribute type @@ -17,7 +17,7 @@ class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @!method initialize(static:, type: :static) # Customize your own chunking strategy by setting chunk size and chunk overlap. # - # @param static [OpenAI::StaticFileChunkingStrategy] + # @param static [OpenAI::Models::StaticFileChunkingStrategy] # # @param type [Symbol, :static] Always `static`. end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 8fe51192..e51974b2 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -51,18 +51,18 @@ class Upload < OpenAI::Internal::Type::BaseModel # @!attribute status # The status of the Upload. # - # @return [Symbol, OpenAI::Upload::Status] + # @return [Symbol, OpenAI::Models::Upload::Status] required :status, enum: -> { OpenAI::Upload::Status } # @!attribute file # The `File` object represents a document that has been uploaded to OpenAI. # - # @return [OpenAI::FileObject, nil] + # @return [OpenAI::Models::FileObject, nil] optional :file, -> { OpenAI::FileObject }, nil?: true # @!method initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload) - # Some parameter documentations has been truncated, see {OpenAI::Upload} for more - # details. 
+ # Some parameter documentations has been truncated, see {OpenAI::Models::Upload} + # for more details. # # The Upload object can accept byte chunks in the form of Parts. # @@ -78,15 +78,15 @@ class Upload < OpenAI::Internal::Type::BaseModel # # @param purpose [String] The intended purpose of the file. [Please refer here](https://platform.openai.co # - # @param status [Symbol, OpenAI::Upload::Status] The status of the Upload. + # @param status [Symbol, OpenAI::Models::Upload::Status] The status of the Upload. # - # @param file [OpenAI::FileObject, nil] The `File` object represents a document that has been uploaded to OpenAI. + # @param file [OpenAI::Models::FileObject, nil] The `File` object represents a document that has been uploaded to OpenAI. # # @param object [Symbol, :upload] The object type, which is always "upload". # The status of the Upload. # - # @see OpenAI::Upload#status + # @see OpenAI::Models::Upload#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 13caaed5..54e7530f 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -34,7 +34,7 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # See the # [documentation on File purposes](https://platform.openai.com/docs/api-reference/files/create#files-create-purpose). # - # @return [Symbol, OpenAI::FilePurpose] + # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::FilePurpose } # @!method initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}) @@ -47,7 +47,7 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # # @param mime_type [String] The MIME type of the file. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 1ed39993..1df047de 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -11,14 +11,14 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel # @!attribute data # The chunk of bytes for this Part. # - # @return [Pathname, StringIO, IO, OpenAI::FilePart] + # @return [Pathname, StringIO, IO, String, OpenAI::FilePart] required :data, OpenAI::Internal::Type::FileInput # @!method initialize(data:, request_options: {}) # Some parameter documentations has been truncated, see # {OpenAI::Models::Uploads::PartCreateParams} for more details. # - # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. + # @param data [Pathname, StringIO, IO, String, OpenAI::FilePart] The chunk of bytes for this Part. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index a48b84f7..6a930067 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -18,7 +18,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute file_counts # - # @return [OpenAI::VectorStore::FileCounts] + # @return [OpenAI::Models::VectorStore::FileCounts] required :file_counts, -> { OpenAI::VectorStore::FileCounts } # @!attribute last_active_at @@ -55,7 +55,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # `completed`. A status of `completed` indicates that the vector store is ready # for use. 
# - # @return [Symbol, OpenAI::VectorStore::Status] + # @return [Symbol, OpenAI::Models::VectorStore::Status] required :status, enum: -> { OpenAI::VectorStore::Status } # @!attribute usage_bytes @@ -67,8 +67,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreExpirationAfter, nil] - optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter } + # @return [OpenAI::Models::VectorStore::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStore::ExpiresAfter } # @!attribute expires_at # The Unix timestamp (in seconds) for when the vector store will expire. @@ -77,8 +77,8 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :expires_at, Integer, nil?: true # @!method initialize(id:, created_at:, file_counts:, last_active_at:, metadata:, name:, status:, usage_bytes:, expires_after: nil, expires_at: nil, object: :vector_store) - # Some parameter documentations has been truncated, see {OpenAI::VectorStore} for - # more details. + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStore} for more details. # # A vector store is a collection of processed files can be used by the # `file_search` tool. @@ -87,7 +87,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store was created. # - # @param file_counts [OpenAI::VectorStore::FileCounts] + # @param file_counts [OpenAI::Models::VectorStore::FileCounts] # # @param last_active_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store was last active. # @@ -95,17 +95,17 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # # @param name [String] The name of the vector store. 
# - # @param status [Symbol, OpenAI::VectorStore::Status] The status of the vector store, which can be either `expired`, `in_progress`, or + # @param status [Symbol, OpenAI::Models::VectorStore::Status] The status of the vector store, which can be either `expired`, `in_progress`, or # # @param usage_bytes [Integer] The total number of bytes used by the files in the vector store. # - # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] The expiration policy for a vector store. # # @param expires_at [Integer, nil] The Unix timestamp (in seconds) for when the vector store will expire. # # @param object [Symbol, :vector_store] The object type, which is always `vector_store`. - # @see OpenAI::VectorStore#file_counts + # @see OpenAI::Models::VectorStore#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel # @!attribute cancelled # The number of files that were cancelled. @@ -153,7 +153,7 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # `completed`. A status of `completed` indicates that the vector store is ready # for use. # - # @see OpenAI::VectorStore#status + # @see OpenAI::Models::VectorStore#status module Status extend OpenAI::Internal::Type::Enum @@ -164,6 +164,32 @@ module Status # @!method self.values # @return [Array] end + + # @see OpenAI::Models::VectorStore#expires_after + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + # + # @return [Symbol, :last_active_at] + required :anchor, const: :last_active_at + + # @!attribute days + # The number of days after the anchor time that the vector store will expire. 
+ # + # @return [Integer] + required :days, Integer + + # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStore::ExpiresAfter} for more details. + # + # The expiration policy for a vector store. + # + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + end end end end diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 237d6f5f..dfe50418 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -11,14 +11,14 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # - # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreExpirationAfter, nil] - optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter } + # @return [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreCreateParams::ExpiresAfter } # @!attribute file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that @@ -49,9 +49,9 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreCreateParams} for more details. 
# - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. # # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # @@ -60,6 +60,31 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @param name [String] The name of the vector store. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + # + # @return [Symbol, :last_active_at] + required :anchor, const: :last_active_at + + # @!attribute days + # The number of days after the anchor time that the vector store will expire. + # + # @return [Integer] + required :days, Integer + + # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreCreateParams::ExpiresAfter} for more details. + # + # The expiration policy for a vector store. + # + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. 
Supported anchors: ` + end end end end diff --git a/lib/openai/models/vector_store_expiration_after.rb b/lib/openai/models/vector_store_expiration_after.rb deleted file mode 100644 index 905f4eaa..00000000 --- a/lib/openai/models/vector_store_expiration_after.rb +++ /dev/null @@ -1,30 +0,0 @@ -# frozen_string_literal: true - -module OpenAI - module Models - class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel - # @!attribute anchor - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - # - # @return [Symbol, :last_active_at] - required :anchor, const: :last_active_at - - # @!attribute days - # The number of days after the anchor time that the vector store will expire. - # - # @return [Integer] - required :days, Integer - - # @!method initialize(days:, anchor: :last_active_at) - # Some parameter documentations has been truncated, see - # {OpenAI::VectorStoreExpirationAfter} for more details. - # - # The expiration policy for a vector store. - # - # @param days [Integer] The number of days after the anchor time that the vector store will expire. - # - # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` - end - end -end diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 40712974..fcdc7d40 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -36,7 +36,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# - # @return [Symbol, OpenAI::VectorStoreListParams::Order, nil] + # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order, nil] optional :order, enum: -> { OpenAI::VectorStoreListParams::Order } # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) @@ -49,7 +49,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 2009716d..c1ae6419 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -16,7 +16,7 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @!attribute filters # A filter to apply based on file attributes. # - # @return [OpenAI::ComparisonFilter, OpenAI::CompoundFilter, nil] + # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::VectorStoreSearchParams::Filters } # @!attribute max_num_results @@ -29,7 +29,7 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @!attribute ranking_options # Ranking options for search. 
# - # @return [OpenAI::VectorStoreSearchParams::RankingOptions, nil] + # @return [OpenAI::Models::VectorStoreSearchParams::RankingOptions, nil] optional :ranking_options, -> { OpenAI::VectorStoreSearchParams::RankingOptions } # @!attribute rewrite_query @@ -44,11 +44,11 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # # @param query [String, Array] A query string for a search # - # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter] A filter to apply based on file attributes. + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 # - # @param ranking_options [OpenAI::VectorStoreSearchParams::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search. # # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search. 
# @@ -60,15 +60,11 @@ module Query variant String - variant -> { OpenAI::VectorStoreSearchParams::Query::StringArray } + variant -> { OpenAI::Models::VectorStoreSearchParams::Query::StringArray } # @!method self.variants # @return [Array(String, Array)] - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, T::Array[String]) } - end - # @type [OpenAI::Internal::Type::Converter] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -84,17 +80,13 @@ module Filters variant -> { OpenAI::CompoundFilter } # @!method self.variants - # @return [Array(OpenAI::ComparisonFilter, OpenAI::CompoundFilter)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter) } - end + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end class RankingOptions < OpenAI::Internal::Type::BaseModel # @!attribute ranker # - # @return [Symbol, OpenAI::VectorStoreSearchParams::RankingOptions::Ranker, nil] + # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::VectorStoreSearchParams::RankingOptions::Ranker } # @!attribute score_threshold @@ -105,10 +97,10 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. 
# - # @param ranker [Symbol, OpenAI::VectorStoreSearchParams::RankingOptions::Ranker] + # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] # @param score_threshold [Float] - # @see OpenAI::VectorStoreSearchParams::RankingOptions#ranker + # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker module Ranker extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 31f23ac7..5b623829 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -66,10 +66,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end class Content < OpenAI::Internal::Type::BaseModel diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index fef654c0..81557821 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -10,8 +10,8 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @!attribute expires_after # The expiration policy for a vector store. # - # @return [OpenAI::VectorStoreExpirationAfter, nil] - optional :expires_after, -> { OpenAI::VectorStoreExpirationAfter }, nil?: true + # @return [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] + optional :expires_after, -> { OpenAI::VectorStoreUpdateParams::ExpiresAfter }, nil?: true # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful @@ -34,13 +34,38 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # Some parameter documentations has been truncated, see # {OpenAI::Models::VectorStoreUpdateParams} for more details. 
# - # @param expires_after [OpenAI::VectorStoreExpirationAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param name [String, nil] The name of the vector store. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + # @!attribute anchor + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + # + # @return [Symbol, :last_active_at] + required :anchor, const: :last_active_at + + # @!attribute days + # The number of days after the anchor time that the vector store will expire. + # + # @return [Integer] + required :days, Integer + + # @!method initialize(days:, anchor: :last_active_at) + # Some parameter documentations has been truncated, see + # {OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter} for more details. + # + # The expiration policy for a vector store. + # + # @param days [Integer] The number of days after the anchor time that the vector store will expire. + # + # @param anchor [Symbol, :last_active_at] Anchor timestamp after which the expiration policy applies. Supported anchors: ` + end end end end diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index f590ab7f..0815e0f1 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -34,7 +34,7 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. 
# - # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) @@ -45,7 +45,7 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -60,10 +60,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index a6e6e635..d9f5bb5c 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -34,7 +34,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
# - # @return [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter, nil] optional :filter, enum: -> { OpenAI::VectorStores::FileBatchListFilesParams::Filter } # @!attribute limit @@ -48,7 +48,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order, nil] optional :order, enum: -> { OpenAI::VectorStores::FileBatchListFilesParams::Order } # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) @@ -61,11 +61,11 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param filter [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 0d1bc6ed..e4bfafa9 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -34,7 +34,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # - # @return [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam, nil] + # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategyParam } # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) @@ -45,7 +45,7 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] @@ -60,10 +60,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index dcebcd20..2540afea 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -29,7 +29,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # - # @return [Symbol, OpenAI::VectorStores::FileListParams::Filter, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter, nil] optional :filter, enum: -> { OpenAI::VectorStores::FileListParams::Filter } # @!attribute limit @@ -43,7 +43,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # - # @return [Symbol, OpenAI::VectorStores::FileListParams::Order, nil] + # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::VectorStores::FileListParams::Order } # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) @@ -54,11 +54,11 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param filter [Symbol, OpenAI::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. 
One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 30c15708..be3d5d7b 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -48,10 +48,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index 594fd1bc..c6e737e3 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -21,7 +21,7 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # The last error associated with this vector store file. Will be `null` if there # are no errors. # - # @return [OpenAI::VectorStores::VectorStoreFile::LastError, nil] + # @return [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] required :last_error, -> { OpenAI::VectorStores::VectorStoreFile::LastError }, nil?: true # @!attribute object @@ -35,7 +35,7 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. 
# - # @return [Symbol, OpenAI::VectorStores::VectorStoreFile::Status] + # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] required :status, enum: -> { OpenAI::VectorStores::VectorStoreFile::Status } # @!attribute usage_bytes @@ -71,12 +71,12 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # @!attribute chunking_strategy # The strategy used to chunk the file. # - # @return [OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject, nil] + # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject, nil] optional :chunking_strategy, union: -> { OpenAI::FileChunkingStrategy } # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file") # Some parameter documentations has been truncated, see - # {OpenAI::VectorStores::VectorStoreFile} for more details. + # {OpenAI::Models::VectorStores::VectorStoreFile} for more details. # # A list of files attached to a vector store. # @@ -84,9 +84,9 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store file was created. # - # @param last_error [OpenAI::VectorStores::VectorStoreFile::LastError, nil] The last error associated with this vector store file. Will be `null` if there a + # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] The last error associated with this vector store file. Will be `null` if there a # - # @param status [Symbol, OpenAI::VectorStores::VectorStoreFile::Status] The status of the vector store file, which can be either `in_progress`, `complet + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] The status of the vector store file, which can be either `in_progress`, `complet # # @param usage_bytes [Integer] The total vector store usage in bytes. 
Note that this may be different from the # @@ -94,16 +94,16 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::StaticFileChunkingStrategyObject, OpenAI::OtherFileChunkingStrategyObject] The strategy used to chunk the file. + # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] The strategy used to chunk the file. # # @param object [Symbol, :"vector_store.file"] The object type, which is always `vector_store.file`. - # @see OpenAI::VectorStores::VectorStoreFile#last_error + # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error class LastError < OpenAI::Internal::Type::BaseModel # @!attribute code # One of `server_error` or `rate_limit_exceeded`. # - # @return [Symbol, OpenAI::VectorStores::VectorStoreFile::LastError::Code] + # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] required :code, enum: -> { OpenAI::VectorStores::VectorStoreFile::LastError::Code } # @!attribute message @@ -116,13 +116,13 @@ class LastError < OpenAI::Internal::Type::BaseModel # The last error associated with this vector store file. Will be `null` if there # are no errors. # - # @param code [Symbol, OpenAI::VectorStores::VectorStoreFile::LastError::Code] One of `server_error` or `rate_limit_exceeded`. + # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] One of `server_error` or `rate_limit_exceeded`. # # @param message [String] A human-readable description of the error. # One of `server_error` or `rate_limit_exceeded`. 
# - # @see OpenAI::VectorStores::VectorStoreFile::LastError#code + # @see OpenAI::Models::VectorStores::VectorStoreFile::LastError#code module Code extend OpenAI::Internal::Type::Enum @@ -139,7 +139,7 @@ module Code # `completed`, `cancelled`, or `failed`. The status `completed` indicates that the # vector store file is ready for use. # - # @see OpenAI::VectorStores::VectorStoreFile#status + # @see OpenAI::Models::VectorStores::VectorStoreFile#status module Status extend OpenAI::Internal::Type::Enum @@ -163,10 +163,6 @@ module Attribute # @!method self.variants # @return [Array(String, Float, Boolean)] - - define_sorbet_constant!(:Variants) do - T.type_alias { T.any(String, Float, T::Boolean) } - end end end end diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index b66e2b14..f4b251ee 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -20,7 +20,7 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @!attribute file_counts # - # @return [OpenAI::VectorStores::VectorStoreFileBatch::FileCounts] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] required :file_counts, -> { OpenAI::VectorStores::VectorStoreFileBatch::FileCounts } # @!attribute object @@ -33,7 +33,7 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. 
# - # @return [Symbol, OpenAI::VectorStores::VectorStoreFileBatch::Status] + # @return [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] required :status, enum: -> { OpenAI::VectorStores::VectorStoreFileBatch::Status } # @!attribute vector_store_id @@ -47,7 +47,7 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @!method initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch") # Some parameter documentations has been truncated, see - # {OpenAI::VectorStores::VectorStoreFileBatch} for more details. + # {OpenAI::Models::VectorStores::VectorStoreFileBatch} for more details. # # A batch of files attached to a vector store. # @@ -55,15 +55,15 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # # @param created_at [Integer] The Unix timestamp (in seconds) for when the vector store files batch was create # - # @param file_counts [OpenAI::VectorStores::VectorStoreFileBatch::FileCounts] + # @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] # - # @param status [Symbol, OpenAI::VectorStores::VectorStoreFileBatch::Status] The status of the vector store files batch, which can be either `in_progress`, ` + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] The status of the vector store files batch, which can be either `in_progress`, ` # # @param vector_store_id [String] The ID of the [vector store](https://platform.openai.com/docs/api-reference/vect # # @param object [Symbol, :"vector_store.files_batch"] The object type, which is always `vector_store.file_batch`. - # @see OpenAI::VectorStores::VectorStoreFileBatch#file_counts + # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel # @!attribute cancelled # The number of files that where cancelled. 
@@ -110,7 +110,7 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # The status of the vector store files batch, which can be either `in_progress`, # `completed`, `cancelled` or `failed`. # - # @see OpenAI::VectorStores::VectorStoreFileBatch#status + # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#status module Status extend OpenAI::Internal::Type::Enum diff --git a/lib/openai/resources/audio/speech.rb b/lib/openai/resources/audio/speech.rb index befc2a60..744c3b69 100644 --- a/lib/openai/resources/audio/speech.rb +++ b/lib/openai/resources/audio/speech.rb @@ -13,13 +13,13 @@ class Speech # # @param input [String] The text to generate audio for. The maximum length is 4096 characters. # - # @param model [String, Symbol, OpenAI::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] One of the available [TTS models](https://platform.openai.com/docs/models#tts): # - # @param voice [String, Symbol, OpenAI::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] The voice to use when generating the audio. Supported voices are `alloy`, `ash`, # # @param instructions [String] Control the voice of your generated audio with additional instructions. Does not # - # @param response_format [Symbol, OpenAI::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav + # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, `wav # # @param speed [Float] The speed of the generated audio. Select a value from `0.25` to `4.0`. 
`1.0` is # diff --git a/lib/openai/resources/audio/transcriptions.rb b/lib/openai/resources/audio/transcriptions.rb index 65b01cb8..45570d65 100644 --- a/lib/openai/resources/audio/transcriptions.rb +++ b/lib/openai/resources/audio/transcriptions.rb @@ -14,27 +14,27 @@ class Transcriptions # # @overload create(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # - # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # @param chunking_strategy [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs # - # @param include [Array] Additional information to include in the transcription response. + # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. 
Supplying the input language in [ISO-639-1](htt # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # - # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Audio::Transcription, OpenAI::Audio::TranscriptionVerbose] + # @return [OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose] # # @see OpenAI::Models::Audio::TranscriptionCreateParams def create(params) @@ -63,27 +63,27 @@ def create(params) # # @overload create_streaming(file:, model:, chunking_strategy: nil, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) to transcribe, in one of these formats: fl # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. 
The options are `gpt-4o-transcribe`, `gpt-4o-mini-transc # - # @param chunking_strategy [Symbol, :auto, OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs + # @param chunking_strategy [Symbol, :auto, OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig, nil] Controls how the audio is cut into chunks. When set to `"auto"`, the server firs # - # @param include [Array] Additional information to include in the transcription response. + # @param include [Array] Additional information to include in the transcription response. # # @param language [String] The language of the input audio. Supplying the input language in [ISO-639-1](htt # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # - # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. `response_format + # @param timestamp_granularities [Array] The timestamp granularities to populate for this transcription. 
`response_format # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Audio::TranscriptionCreateParams def create_streaming(params) diff --git a/lib/openai/resources/audio/translations.rb b/lib/openai/resources/audio/translations.rb index f43551ab..35ce0d09 100644 --- a/lib/openai/resources/audio/translations.rb +++ b/lib/openai/resources/audio/translations.rb @@ -11,19 +11,19 @@ class Translations # # @overload create(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The audio file object (not file name) translate, in one of these formats: flac, # - # @param model [String, Symbol, OpenAI::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh + # @param model [String, Symbol, OpenAI::Models::AudioModel] ID of the model to use. Only `whisper-1` (which is powered by our open source Wh # # @param prompt [String] An optional text to guide the model's style or continue a previous audio segment # - # @param response_format [Symbol, OpenAI::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] The format of the output, in one of these options: `json`, `text`, `srt`, `verbo # # @param temperature [Float] The sampling temperature, between 0 and 1. 
Higher values like 0.8 will make the # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose] + # @return [OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose] # # @see OpenAI::Models::Audio::TranslationCreateParams def create(params) diff --git a/lib/openai/resources/batches.rb b/lib/openai/resources/batches.rb index 718d0a81..bdd8d876 100644 --- a/lib/openai/resources/batches.rb +++ b/lib/openai/resources/batches.rb @@ -10,9 +10,9 @@ class Batches # # @overload create(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) # - # @param completion_window [Symbol, OpenAI::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] The time frame within which the batch should be processed. Currently only `24h` # - # @param endpoint [Symbol, OpenAI::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] The endpoint to be used for all requests in the batch. Currently `/v1/responses` # # @param input_file_id [String] The ID of an uploaded file that contains requests for the new batch. 
# @@ -20,7 +20,7 @@ class Batches # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Batch] + # @return [OpenAI::Models::Batch] # # @see OpenAI::Models::BatchCreateParams def create(params) @@ -36,7 +36,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Batch] + # @return [OpenAI::Models::Batch] # # @see OpenAI::Models::BatchRetrieveParams def retrieve(batch_id, params = {}) @@ -61,7 +61,7 @@ def retrieve(batch_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::BatchListParams def list(params = {}) @@ -86,7 +86,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Batch] + # @return [OpenAI::Models::Batch] # # @see OpenAI::Models::BatchCancelParams def cancel(batch_id, params = {}) diff --git a/lib/openai/resources/beta/assistants.rb b/lib/openai/resources/beta/assistants.rb index ca192073..8b8adf5a 100644 --- a/lib/openai/resources/beta/assistants.rb +++ b/lib/openai/resources/beta/assistants.rb @@ -11,7 +11,7 @@ class Assistants # # @overload create(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::ChatModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param description [String, nil] The description of the assistant. The maximum length is 512 characters. 
# @@ -21,21 +21,21 @@ class Assistants # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. 
There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Assistant] + # @return [OpenAI::Models::Beta::Assistant] # # @see OpenAI::Models::Beta::AssistantCreateParams def create(params) @@ -57,7 +57,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Assistant] + # @return [OpenAI::Models::Beta::Assistant] # # @see OpenAI::Models::Beta::AssistantRetrieveParams def retrieve(assistant_id, params = {}) @@ -84,25 +84,25 @@ def retrieve(assistant_id, params = {}) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param name [String, nil] The name of the assistant. The maximum length is 256 characters. # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. 
Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_resources [OpenAI::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per + # @param tools [Array] A list of tool enabled on the assistant. There can be a maximum of 128 tools per # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Assistant] + # @return [OpenAI::Models::Beta::Assistant] # # @see OpenAI::Models::Beta::AssistantUpdateParams def update(assistant_id, params = {}) @@ -129,11 +129,11 @@ def update(assistant_id, params = {}) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::AssistantListParams def list(params = {}) @@ -156,7 +156,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::AssistantDeleted] + # @return [OpenAI::Models::Beta::AssistantDeleted] # # @see OpenAI::Models::Beta::AssistantDeleteParams def delete(assistant_id, params = {}) diff --git a/lib/openai/resources/beta/threads.rb b/lib/openai/resources/beta/threads.rb index bf90b2a5..43c6986e 100644 --- a/lib/openai/resources/beta/threads.rb +++ b/lib/openai/resources/beta/threads.rb @@ -17,15 +17,15 @@ class Threads # # @overload create(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) # - # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to + # @param messages [Array] A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param tool_resources [OpenAI::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] # # @see OpenAI::Models::Beta::ThreadCreateParams def create(params = {}) @@ -47,7 +47,7 @@ def create(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] # # @see OpenAI::Models::Beta::ThreadRetrieveParams def retrieve(thread_id, params = {}) @@ -70,11 +70,11 @@ def retrieve(thread_id, params = {}) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. 
This can be # - # @param tool_resources [OpenAI::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] A set of resources that are made available to the assistant's tools in this thre # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Thread] + # @return [OpenAI::Models::Beta::Thread] # # @see OpenAI::Models::Beta::ThreadUpdateParams def update(thread_id, params = {}) @@ -96,7 +96,7 @@ def update(thread_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::ThreadDeleted] + # @return [OpenAI::Models::Beta::ThreadDeleted] # # @see OpenAI::Models::Beta::ThreadDeleteParams def delete(thread_id, params = {}) @@ -127,29 +127,29 @@ def delete(thread_id, params = {}) # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. 
Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. 
Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams def create_and_run(params) @@ -191,29 +191,29 @@ def stream # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param model [String, Symbol, OpenAI::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Specifies the format that the model must output. Compatible with [GPT-4o](https: # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param thread [OpenAI::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] Options to create a new thread. If no thread is provided when running a # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. 
+ # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Controls which (if any) tool is called by the model. # - # @param tool_resources [OpenAI::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] A set of resources that are used by the assistant's tools. The resources are spe # - # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify + # @param tools [Array, nil] Override the tools the assistant can use for this run. This is useful for modify # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, where the # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Controls for how a thread will be truncated prior to the run. Use this to contro + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] Controls for how a thread will be truncated prior to the run. Use this to contro # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::ThreadCreateAndRunParams def stream_raw(params) diff --git a/lib/openai/resources/beta/threads/messages.rb b/lib/openai/resources/beta/threads/messages.rb index 503f7228..fbc2fcb8 100644 --- a/lib/openai/resources/beta/threads/messages.rb +++ b/lib/openai/resources/beta/threads/messages.rb @@ -14,17 +14,17 @@ class Messages # # @param thread_id [String] The ID of the [thread](https://platform.openai.com/docs/api-reference/threads) t # - # @param content [String, Array] The text contents of the message. + # @param content [String, Array] The text contents of the message. 
# - # @param role [Symbol, OpenAI::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] The role of the entity that is creating the message. Allowed values include: # - # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. + # @param attachments [Array, nil] A list of files attached to the message, and the tools they should be added to. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] # # @see OpenAI::Models::Beta::Threads::MessageCreateParams def create(thread_id, params) @@ -51,7 +51,7 @@ def create(thread_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] # # @see OpenAI::Models::Beta::Threads::MessageRetrieveParams def retrieve(message_id, params) @@ -83,7 +83,7 @@ def retrieve(message_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Message] + # @return [OpenAI::Models::Beta::Threads::Message] # # @see OpenAI::Models::Beta::Threads::MessageUpdateParams def update(message_id, params) @@ -116,13 +116,13 @@ def update(message_id, params) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param run_id [String] Filter messages by the run ID that generated them. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::MessageListParams def list(thread_id, params = {}) @@ -147,7 +147,7 @@ def list(thread_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::MessageDeleted] + # @return [OpenAI::Models::Beta::Threads::MessageDeleted] # # @see OpenAI::Models::Beta::Threads::MessageDeleteParams def delete(message_id, params) diff --git a/lib/openai/resources/beta/threads/runs.rb b/lib/openai/resources/beta/threads/runs.rb index a240f654..50128b5a 100644 --- a/lib/openai/resources/beta/threads/runs.rb +++ b/lib/openai/resources/beta/threads/runs.rb @@ -22,11 +22,11 @@ class Runs # # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t # - # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. + # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. 
# # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re # @@ -36,27 +36,27 @@ class Runs # # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. This ca # - # @param model [String, Symbol, OpenAI::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference # # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Body param: **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP # # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. # - # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. 
This is usefu + # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunCreateParams def create(thread_id, params) @@ -90,11 +90,11 @@ def create(thread_id, params) # # @param assistant_id [String] Body param: The ID of the [assistant](https://platform.openai.com/docs/api-refer # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param additional_instructions [String, nil] Body param: Appends additional instructions at the end of the instructions for t # - # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. + # @param additional_messages [Array, nil] Body param: Adds additional messages to the thread before creating the run. # # @param instructions [String, nil] Body param: Overrides the [instructions](https://platform.openai.com/docs/api-re # @@ -104,27 +104,27 @@ def create(thread_id, params) # # @param metadata [Hash{Symbol=>String}, nil] Body param: Set of 16 key-value pairs that can be attached to an object. 
This ca # - # @param model [String, Symbol, OpenAI::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] Body param: The ID of the [Model](https://platform.openai.com/docs/api-reference # # @param parallel_tool_calls [Boolean] Body param: Whether to enable [parallel function calling](https://platform.opena # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] Body param: **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] Body param: **o-series models only** # - # @param response_format [Symbol, :auto, OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONObject, OpenAI::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] Body param: Specifies the format that the model must output. Compatible with [GP # # @param temperature [Float, nil] Body param: What sampling temperature to use, between 0 and 2. Higher values lik # - # @param tool_choice [Symbol, OpenAI::Beta::AssistantToolChoiceOption::Auto, OpenAI::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] Body param: Controls which (if any) tool is called by the model. # - # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. This is usefu + # @param tools [Array, nil] Body param: Override the tools the assistant can use for this run. 
This is usefu # # @param top_p [Float, nil] Body param: An alternative to sampling with temperature, called nucleus sampling # - # @param truncation_strategy [OpenAI::Beta::TruncationObject, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] Body param: Controls for how a thread will be truncated prior to the run. Use th # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunCreateParams def create_stream_raw(thread_id, params) @@ -160,7 +160,7 @@ def create_stream_raw(thread_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunRetrieveParams def retrieve(run_id, params) @@ -192,7 +192,7 @@ def retrieve(run_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunUpdateParams def update(run_id, params) @@ -225,11 +225,11 @@ def update(run_id, params) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::RunListParams def list(thread_id, params = {}) @@ -254,7 +254,7 @@ def list(thread_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunCancelParams def cancel(run_id, params) @@ -288,11 +288,11 @@ def cancel(run_id, params) # # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc # - # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Run] + # @return [OpenAI::Models::Beta::Threads::Run] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs(run_id, params) @@ -331,11 +331,11 @@ def submit_tool_outputs(run_id, params) # # @param thread_id [String] Path param: The ID of the [thread](https://platform.openai.com/docs/api-referenc # - # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. + # @param tool_outputs [Array] Body param: A list of tools for which the outputs are being submitted. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams def submit_tool_outputs_stream_raw(run_id, params) diff --git a/lib/openai/resources/beta/threads/runs/steps.rb b/lib/openai/resources/beta/threads/runs/steps.rb index eaa27d6e..6accaeb6 100644 --- a/lib/openai/resources/beta/threads/runs/steps.rb +++ b/lib/openai/resources/beta/threads/runs/steps.rb @@ -19,11 +19,11 @@ class Steps # # @param run_id [String] Path param: The ID of the run to which the run step belongs. # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Beta::Threads::Runs::RunStep] + # @return [OpenAI::Models::Beta::Threads::Runs::RunStep] # # @see OpenAI::Models::Beta::Threads::Runs::StepRetrieveParams def retrieve(step_id, params) @@ -60,15 +60,15 @@ def retrieve(step_id, params) # # @param before [String] Query param: A cursor for use in pagination. `before` is an object ID that defin # - # @param include [Array] Query param: A list of additional fields to include in the response. Currently t + # @param include [Array] Query param: A list of additional fields to include in the response. Currently t # # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be # - # @param order [Symbol, OpenAI::Beta::Threads::Runs::StepListParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. 
`asc` for # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Beta::Threads::Runs::StepListParams def list(run_id, params) diff --git a/lib/openai/resources/chat/completions.rb b/lib/openai/resources/chat/completions.rb index 7ee4fee4..9d3ee4d9 100644 --- a/lib/openai/resources/chat/completions.rb +++ b/lib/openai/resources/chat/completions.rb @@ -32,17 +32,17 @@ class Completions # # @overload create(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. 
Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. + # @param functions [Array] Deprecated in favor of `tools`. # # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. # @@ -54,47 +54,47 @@ class Completions # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. 
Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. 
+ # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # - # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletion] + # @return [OpenAI::Models::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionCreateParams def create(params) @@ -108,7 +108,7 @@ def create(params) model = nil tool_models = {} case parsed - in {response_format: OpenAI::Helpers::StructuredOutput::JsonSchemaConverter => model} + in {response_format: OpenAI::StructuredOutput::JsonSchemaConverter => model} parsed.update( response_format: { type: :json_schema, @@ -119,12 +119,12 @@ def create(params) } } ) - in {response_format: {type: :json_schema, json_schema: {schema: OpenAI::Helpers::StructuredOutput::JsonSchemaConverter => model}}} + in {response_format: {type: :json_schema, json_schema: {schema: OpenAI::StructuredOutput::JsonSchemaConverter => model}}} parsed.dig(:response_format, :json_schema).store(:schema, model.to_json_schema) in {tools: Array => tools} mapped = tools.map do |tool| case tool - in OpenAI::Helpers::StructuredOutput::JsonSchemaConverter + in OpenAI::StructuredOutput::JsonSchemaConverter name = tool.name.split("::").last tool_models.store(name, tool) { @@ -135,7 +135,7 @@ def create(params) parameters: tool.to_json_schema } } - in {function: {parameters: OpenAI::Helpers::StructuredOutput::JsonSchemaConverter => params}} + in {function: {parameters: OpenAI::StructuredOutput::JsonSchemaConverter => params}} func = tool.fetch(:function) name = func[:name] ||= params.name.split("::").last tool_models.store(name, params) @@ -148,7 +148,7 @@ def create(params) end unwrap = ->(raw) do - if model.is_a?(OpenAI::Helpers::StructuredOutput::JsonSchemaConverter) + if model.is_a?(OpenAI::StructuredOutput::JsonSchemaConverter) raw[:choices]&.each do |choice| message = choice.fetch(:message) parsed = JSON.parse(message.fetch(:content), symbolize_names: true) @@ -210,17 +210,17 @@ def stream # # @overload stream_raw(messages:, model:, audio: nil, frequency_penalty: nil, 
function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # - # @param messages [Array] A list of messages comprising the conversation so far. Depending on the + # @param messages [Array] A list of messages comprising the conversation so far. Depending on the # - # @param model [String, Symbol, OpenAI::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param audio [OpenAI::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] Parameters for audio output. Required when audio output is requested with # # @param frequency_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param function_call [Symbol, OpenAI::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] Deprecated in favor of `tool_choice`. # - # @param functions [Array] Deprecated in favor of `tools`. + # @param functions [Array] Deprecated in favor of `tools`. # # @param logit_bias [Hash{Symbol=>Integer}, nil] Modify the likelihood of specified tokens appearing in the completion. 
# @@ -232,47 +232,47 @@ def stream # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param modalities [Array, nil] Output types that you would like the model to generate. + # @param modalities [Array, nil] Output types that you would like the model to generate. # # @param n [Integer, nil] How many chat completion choices to generate for each input message. Note that y # # @param parallel_tool_calls [Boolean] Whether to enable [parallel function calling](https://platform.openai.com/docs/g # - # @param prediction [OpenAI::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] Static predicted output content, such as the content of a text file that is # # @param presence_penalty [Float, nil] Number between -2.0 and 2.0. Positive values penalize new tokens based on # - # @param reasoning_effort [Symbol, OpenAI::ReasoningEffort, nil] **o-series models only** + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] **o-series models only** # - # @param response_format [OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, OpenAI::ResponseFormatJSONObject] An object specifying the format that the model must output. + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] An object specifying the format that the model must output. # # @param seed [Integer, nil] This feature is in Beta. # - # @param service_tier [Symbol, OpenAI::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. 
This parameter is # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # # @param store [Boolean, nil] Whether or not to store the output of this chat completion request for # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param tool_choice [Symbol, OpenAI::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] Controls which (if any) tool is called by the model. # - # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a + # @param tools [Array] A list of tools the model may call. Currently, only functions are supported as a # # @param top_logprobs [Integer, nil] An integer between 0 and 20 specifying the number of most likely tokens to # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # - # @param web_search_options [OpenAI::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] This tool searches the web for relevant results to use in a response. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Chat::CompletionCreateParams def stream_raw(params) @@ -302,7 +302,7 @@ def stream_raw(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletion] + # @return [OpenAI::Models::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionRetrieveParams def retrieve(completion_id, params = {}) @@ -329,7 +329,7 @@ def retrieve(completion_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletion] + # @return [OpenAI::Models::Chat::ChatCompletion] # # @see OpenAI::Models::Chat::CompletionUpdateParams def update(completion_id, params) @@ -359,11 +359,11 @@ def update(completion_id, params) # # @param model [String] The model used to generate the Chat Completions. # - # @param order [Symbol, OpenAI::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. Use `asc` for ascending order or ` + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] Sort order for Chat Completions by timestamp. 
Use `asc` for ascending order or ` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::CompletionListParams def list(params = {}) @@ -387,7 +387,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Chat::ChatCompletionDeleted] + # @return [OpenAI::Models::Chat::ChatCompletionDeleted] # # @see OpenAI::Models::Chat::CompletionDeleteParams def delete(completion_id, params = {}) diff --git a/lib/openai/resources/chat/completions/messages.rb b/lib/openai/resources/chat/completions/messages.rb index 36a3e3c8..ea3e3382 100644 --- a/lib/openai/resources/chat/completions/messages.rb +++ b/lib/openai/resources/chat/completions/messages.rb @@ -19,11 +19,11 @@ class Messages # # @param limit [Integer] Number of messages to retrieve. # - # @param order [Symbol, OpenAI::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. Use `asc` for ascending order or `desc` fo + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] Sort order for messages by timestamp. 
Use `asc` for ascending order or `desc` fo # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Chat::Completions::MessageListParams def list(completion_id, params = {}) diff --git a/lib/openai/resources/completions.rb b/lib/openai/resources/completions.rb index f65e1891..de1e8786 100644 --- a/lib/openai/resources/completions.rb +++ b/lib/openai/resources/completions.rb @@ -12,7 +12,7 @@ class Completions # # @overload create(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings # @@ -36,7 +36,7 @@ class Completions # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param suffix [String, nil] The suffix that comes after a completion of inserted text. 
# @@ -48,7 +48,7 @@ class Completions # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Completion] + # @return [OpenAI::Models::Completion] # # @see OpenAI::Models::CompletionCreateParams def create(params) @@ -75,7 +75,7 @@ def create(params) # # @overload create_streaming(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param prompt [String, Array, Array, Array>, nil] The prompt(s) to generate completions for, encoded as a string, array of strings # @@ -99,7 +99,7 @@ def create(params) # # @param stop [String, Array, nil] Not supported with latest reasoning models `o3` and `o4-mini`. # - # @param stream_options [OpenAI::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] Options for streaming response. Only set this when you set `stream: true`. # # @param suffix [String, nil] The suffix that comes after a completion of inserted text. 
# @@ -111,7 +111,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::CompletionCreateParams def create_streaming(params) diff --git a/lib/openai/resources/containers.rb b/lib/openai/resources/containers.rb new file mode 100644 index 00000000..2d582be8 --- /dev/null +++ b/lib/openai/resources/containers.rb @@ -0,0 +1,113 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Containers + # @return [OpenAI::Resources::Containers::Files] + attr_reader :files + + # Create Container + # + # @overload create(name:, expires_after: nil, file_ids: nil, request_options: {}) + # + # @param name [String] Name of the container to create. + # + # @param expires_after [OpenAI::Models::ContainerCreateParams::ExpiresAfter] Container expiration time in seconds relative to the 'anchor' time. + # + # @param file_ids [Array] IDs of files to copy to the container. 
+ # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::ContainerCreateResponse] + # + # @see OpenAI::Models::ContainerCreateParams + def create(params) + parsed, options = OpenAI::ContainerCreateParams.dump_request(params) + @client.request( + method: :post, + path: "containers", + body: parsed, + model: OpenAI::Models::ContainerCreateResponse, + options: options + ) + end + + # Retrieve Container + # + # @overload retrieve(container_id, request_options: {}) + # + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::ContainerRetrieveResponse] + # + # @see OpenAI::Models::ContainerRetrieveParams + def retrieve(container_id, params = {}) + @client.request( + method: :get, + path: ["containers/%1$s", container_id], + model: OpenAI::Models::ContainerRetrieveResponse, + options: params[:request_options] + ) + end + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::ContainerListParams} for more details. + # + # List Containers + # + # @overload list(after: nil, limit: nil, order: nil, request_options: {}) + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::Models::ContainerListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::CursorPage] + # + # @see OpenAI::Models::ContainerListParams + def list(params = {}) + parsed, options = OpenAI::ContainerListParams.dump_request(params) + @client.request( + method: :get, + path: "containers", + query: parsed, + page: OpenAI::Internal::CursorPage, + model: OpenAI::Models::ContainerListResponse, + options: options + ) + end + + # Delete Container + # + # @overload delete(container_id, request_options: {}) + # + # @param container_id [String] The ID of the container to delete. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::ContainerDeleteParams + def delete(container_id, params = {}) + @client.request( + method: :delete, + path: ["containers/%1$s", container_id], + model: NilClass, + options: params[:request_options] + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @files = OpenAI::Resources::Containers::Files.new(client: client) + end + end + end +end diff --git a/lib/openai/resources/containers/files.rb b/lib/openai/resources/containers/files.rb new file mode 100644 index 00000000..356bead3 --- /dev/null +++ b/lib/openai/resources/containers/files.rb @@ -0,0 +1,135 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Containers + class Files + # @return [OpenAI::Resources::Containers::Files::Content] + attr_reader :content + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Containers::FileCreateParams} for more details. + # + # Create a Container File + # + # You can send either a multipart/form-data request with the raw file content, or + # a JSON request with a file ID. 
+ # + # @overload create(container_id, file: nil, file_id: nil, request_options: {}) + # + # @param container_id [String] + # + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. + # + # @param file_id [String] Name of the file to create. + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Containers::FileCreateResponse] + # + # @see OpenAI::Models::Containers::FileCreateParams + def create(container_id, params = {}) + parsed, options = OpenAI::Containers::FileCreateParams.dump_request(params) + @client.request( + method: :post, + path: ["containers/%1$s/files", container_id], + headers: {"content-type" => "multipart/form-data"}, + body: parsed, + model: OpenAI::Models::Containers::FileCreateResponse, + options: options + ) + end + + # Retrieve Container File + # + # @overload retrieve(file_id, container_id:, request_options: {}) + # + # @param file_id [String] + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Models::Containers::FileRetrieveResponse] + # + # @see OpenAI::Models::Containers::FileRetrieveParams + def retrieve(file_id, params) + parsed, options = OpenAI::Containers::FileRetrieveParams.dump_request(params) + container_id = + parsed.delete(:container_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["containers/%1$s/files/%2$s", container_id, file_id], + model: OpenAI::Models::Containers::FileRetrieveResponse, + options: options + ) + end + + # Some parameter documentations has been truncated, see + # {OpenAI::Models::Containers::FileListParams} for more details. 
+ # + # List Container files + # + # @overload list(container_id, after: nil, limit: nil, order: nil, request_options: {}) + # + # @param container_id [String] + # + # @param after [String] A cursor for use in pagination. `after` is an object ID that defines your place + # + # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 + # + # @param order [Symbol, OpenAI::Models::Containers::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [OpenAI::Internal::CursorPage] + # + # @see OpenAI::Models::Containers::FileListParams + def list(container_id, params = {}) + parsed, options = OpenAI::Containers::FileListParams.dump_request(params) + @client.request( + method: :get, + path: ["containers/%1$s/files", container_id], + query: parsed, + page: OpenAI::Internal::CursorPage, + model: OpenAI::Models::Containers::FileListResponse, + options: options + ) + end + + # Delete Container File + # + # @overload delete(file_id, container_id:, request_options: {}) + # + # @param file_id [String] + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::Containers::FileDeleteParams + def delete(file_id, params) + parsed, options = OpenAI::Containers::FileDeleteParams.dump_request(params) + container_id = + parsed.delete(:container_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :delete, + path: ["containers/%1$s/files/%2$s", container_id, file_id], + model: NilClass, + options: options + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + @content = OpenAI::Resources::Containers::Files::Content.new(client: client) + end + end + end + end +end diff --git 
a/lib/openai/resources/containers/files/content.rb b/lib/openai/resources/containers/files/content.rb new file mode 100644 index 00000000..3d07b16e --- /dev/null +++ b/lib/openai/resources/containers/files/content.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true + +module OpenAI + module Resources + class Containers + class Files + class Content + # Retrieve Container File Content + # + # @overload retrieve(file_id, container_id:, request_options: {}) + # + # @param file_id [String] + # @param container_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::Containers::Files::ContentRetrieveParams + def retrieve(file_id, params) + parsed, options = OpenAI::Containers::Files::ContentRetrieveParams.dump_request(params) + container_id = + parsed.delete(:container_id) do + raise ArgumentError.new("missing required path argument #{_1}") + end + @client.request( + method: :get, + path: ["containers/%1$s/files/%2$s/content", container_id, file_id], + model: NilClass, + options: options + ) + end + + # @api private + # + # @param client [OpenAI::Client] + def initialize(client:) + @client = client + end + end + end + end + end +end diff --git a/lib/openai/resources/embeddings.rb b/lib/openai/resources/embeddings.rb index 8d709c8b..934b58cc 100644 --- a/lib/openai/resources/embeddings.rb +++ b/lib/openai/resources/embeddings.rb @@ -12,17 +12,17 @@ class Embeddings # # @param input [String, Array, Array, Array>] Input text to embed, encoded as a string or array of tokens. To embed multiple i # - # @param model [String, Symbol, OpenAI::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co + # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] ID of the model to use. You can use the [List models](https://platform.openai.co # # @param dimensions [Integer] The number of dimensions the resulting output embeddings should have. 
Only suppo # - # @param encoding_format [Symbol, OpenAI::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http + # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] The format to return the embeddings in. Can be either `float` or [`base64`](http # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::CreateEmbeddingResponse] + # @return [OpenAI::Models::CreateEmbeddingResponse] # # @see OpenAI::Models::EmbeddingCreateParams def create(params) diff --git a/lib/openai/resources/evals.rb b/lib/openai/resources/evals.rb index 448254eb..cabdba94 100644 --- a/lib/openai/resources/evals.rb +++ b/lib/openai/resources/evals.rb @@ -10,16 +10,17 @@ class Evals # {OpenAI::Models::EvalCreateParams} for more details. # # Create the structure of an evaluation that can be used to test a model's - # performance. An evaluation is a set of testing criteria and a datasource. After + # performance. An evaluation is a set of testing criteria and the config for a + # data source, which dictates the schema of the data used in the evaluation. After # creating an evaluation, you can run it on different models and model parameters. # We support several types of graders and datasources. For more information, see # the [Evals guide](https://platform.openai.com/docs/guides/evals). # # @overload create(data_source_config:, testing_criteria:, metadata: nil, name: nil, request_options: {}) # - # @param data_source_config [OpenAI::EvalCreateParams::DataSourceConfig::Custom, OpenAI::EvalCreateParams::DataSourceConfig::Logs, OpenAI::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. 
+ # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::Logs, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] The configuration for the data source used for the evaluation runs. Dictates the # - # @param testing_criteria [Array] A list of graders for all eval runs in this group. + # @param testing_criteria [Array] A list of graders for all eval runs in this group. Graders can reference variabl # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -101,9 +102,9 @@ def update(eval_id, params = {}) # # @param limit [Integer] Number of evals to retrieve. # - # @param order [Symbol, OpenAI::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for d # - # @param order_by [Symbol, OpenAI::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] Evals can be ordered by creation time or last updated time. Use # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/evals/runs.rb b/lib/openai/resources/evals/runs.rb index 9b7fe78b..68e1590e 100644 --- a/lib/openai/resources/evals/runs.rb +++ b/lib/openai/resources/evals/runs.rb @@ -10,13 +10,15 @@ class Runs # Some parameter documentations has been truncated, see # {OpenAI::Models::Evals::RunCreateParams} for more details. # - # Create a new evaluation run. This is the endpoint that will kick off grading. + # Kicks off a new run for a given evaluation, specifying the data source, and what + # model configuration to use to test. 
The datasource will be validated against the + # schema specified in the config of the evaluation. # # @overload create(eval_id, data_source:, metadata: nil, name: nil, request_options: {}) # # @param eval_id [String] The ID of the evaluation to create a run for. # - # @param data_source [OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Evals::CreateEvalResponsesRunDataSource] Details about the run's data source. + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource, OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource] Details about the run's data source. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -78,9 +80,9 @@ def retrieve(run_id, params) # # @param limit [Integer] Number of runs to retrieve. # - # @param order [Symbol, OpenAI::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for de # - # @param status [Symbol, OpenAI::Evals::RunListParams::Status] Filter runs by status. One of `queued` | `in_progress` | `failed` | `completed` + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] Filter runs by status. 
One of `queued` | `in_progress` | `failed` | `completed` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/evals/runs/output_items.rb b/lib/openai/resources/evals/runs/output_items.rb index 0d80996a..41f665d8 100644 --- a/lib/openai/resources/evals/runs/output_items.rb +++ b/lib/openai/resources/evals/runs/output_items.rb @@ -53,9 +53,9 @@ def retrieve(output_item_id, params) # # @param limit [Integer] Query param: Number of output items to retrieve. # - # @param order [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Order] Query param: Sort order for output items by timestamp. Use `asc` for ascending o + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] Query param: Sort order for output items by timestamp. Use `asc` for ascending o # - # @param status [Symbol, OpenAI::Evals::Runs::OutputItemListParams::Status] Query param: Filter output items by status. Use `failed` to filter by failed out + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] Query param: Filter output items by status. Use `failed` to filter by failed out # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/files.rb b/lib/openai/resources/files.rb index a7cff25c..34eccf82 100644 --- a/lib/openai/resources/files.rb +++ b/lib/openai/resources/files.rb @@ -30,13 +30,13 @@ class Files # # @overload create(file:, purpose:, request_options: {}) # - # @param file [Pathname, StringIO, IO, OpenAI::FilePart] The File object (not file name) to be uploaded. + # @param file [Pathname, StringIO, IO, String, OpenAI::FilePart] The File object (not file name) to be uploaded. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. One of: - `assistants`: Used in the A + # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. 
One of: - `assistants`: Used in the A # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FileObject] + # @return [OpenAI::Models::FileObject] # # @see OpenAI::Models::FileCreateParams def create(params) @@ -59,7 +59,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FileObject] + # @return [OpenAI::Models::FileObject] # # @see OpenAI::Models::FileRetrieveParams def retrieve(file_id, params = {}) @@ -82,13 +82,13 @@ def retrieve(file_id, params = {}) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord # # @param purpose [String] Only return files with the given purpose. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FileListParams def list(params = {}) @@ -111,7 +111,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FileDeleted] + # @return [OpenAI::Models::FileDeleted] # # @see OpenAI::Models::FileDeleteParams def delete(file_id, params = {}) diff --git a/lib/openai/resources/fine_tuning/alpha/graders.rb b/lib/openai/resources/fine_tuning/alpha/graders.rb index 5c7ba54b..52c4cc77 100644 --- a/lib/openai/resources/fine_tuning/alpha/graders.rb +++ b/lib/openai/resources/fine_tuning/alpha/graders.rb @@ -9,7 +9,7 @@ class Graders # # @overload run(grader:, model_sample:, reference_answer:, request_options: {}) # - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. + # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # # @param model_sample [String] The model sample to be evaluated. # @@ -35,7 +35,7 @@ def run(params) # # @overload validate(grader:, request_options: {}) # - # @param grader [OpenAI::Graders::StringCheckGrader, OpenAI::Graders::TextSimilarityGrader, OpenAI::Graders::PythonGrader, OpenAI::Graders::ScoreModelGrader, OpenAI::Graders::MultiGrader] The grader used for the fine-tuning job. 
+ # @param grader [OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::Graders::TextSimilarityGrader, OpenAI::Models::Graders::PythonGrader, OpenAI::Models::Graders::ScoreModelGrader, OpenAI::Models::Graders::MultiGrader] The grader used for the fine-tuning job. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb index e11f3d60..f0afec14 100644 --- a/lib/openai/resources/fine_tuning/checkpoints/permissions.rb +++ b/lib/openai/resources/fine_tuning/checkpoints/permissions.rb @@ -54,7 +54,7 @@ def create(fine_tuned_model_checkpoint, params) # # @param limit [Integer] Number of permissions to retrieve. # - # @param order [Symbol, OpenAI::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] The order in which to retrieve permissions. # # @param project_id [String] The ID of the project to get permissions for. # diff --git a/lib/openai/resources/fine_tuning/jobs.rb b/lib/openai/resources/fine_tuning/jobs.rb index b6458288..c978c56c 100644 --- a/lib/openai/resources/fine_tuning/jobs.rb +++ b/lib/openai/resources/fine_tuning/jobs.rb @@ -20,17 +20,17 @@ class Jobs # # @overload create(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) # - # @param model [String, Symbol, OpenAI::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] The name of the model to fine-tune. You can select one of the # # @param training_file [String] The ID of an uploaded file that contains training data. 
# - # @param hyperparameters [OpenAI::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] The hyperparameters used for the fine-tuning job. # - # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. + # @param integrations [Array, nil] A list of integrations to enable for your fine-tuning job. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param method_ [OpenAI::FineTuning::JobCreateParams::Method] The method used for fine-tuning. + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] The method used for fine-tuning. # # @param seed [Integer, nil] The seed controls the reproducibility of the job. Passing in the same seed and j # @@ -40,7 +40,7 @@ class Jobs # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobCreateParams def create(params) @@ -67,7 +67,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobRetrieveParams def retrieve(fine_tuning_job_id, params = {}) @@ -94,7 +94,7 @@ def retrieve(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListParams def list(params = {}) @@ -120,7 +120,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # 
@return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobCancelParams def cancel(fine_tuning_job_id, params = {}) @@ -147,7 +147,7 @@ def cancel(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::JobListEventsParams def list_events(fine_tuning_job_id, params = {}) @@ -173,7 +173,7 @@ def list_events(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobPauseParams def pause(fine_tuning_job_id, params = {}) @@ -196,7 +196,7 @@ def pause(fine_tuning_job_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::FineTuning::FineTuningJob] + # @return [OpenAI::Models::FineTuning::FineTuningJob] # # @see OpenAI::Models::FineTuning::JobResumeParams def resume(fine_tuning_job_id, params = {}) diff --git a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb index b588de3d..0483645d 100644 --- a/lib/openai/resources/fine_tuning/jobs/checkpoints.rb +++ b/lib/openai/resources/fine_tuning/jobs/checkpoints.rb @@ -20,7 +20,7 @@ class Checkpoints # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::FineTuning::Jobs::CheckpointListParams def list(fine_tuning_job_id, params = {}) diff --git a/lib/openai/resources/images.rb b/lib/openai/resources/images.rb index b04d709d..6a521f3b 100644 --- a/lib/openai/resources/images.rb +++ b/lib/openai/resources/images.rb @@ -10,21 +10,21 @@ class Images # # @overload 
create_variation(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart] The image to use as the basis for the variation(s). Must be a valid PNG file, le # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` is supported at this time # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. # - # @param response_format [Symbol, OpenAI::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageCreateVariationParams::Size, nil] The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] The size of the generated images. 
Must be one of `256x256`, `512x512`, or `1024x # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ImagesResponse] + # @return [OpenAI::Models::ImagesResponse] # # @see OpenAI::Models::ImageCreateVariationParams def create_variation(params) @@ -47,29 +47,29 @@ def create_variation(params) # # @overload edit(image:, prompt:, background: nil, mask: nil, model: nil, n: nil, quality: nil, response_format: nil, size: nil, user: nil, request_options: {}) # - # @param image [Pathname, StringIO, IO, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. + # @param image [Pathname, StringIO, IO, String, OpenAI::FilePart, Array] The image(s) to edit. Must be a supported image file or an array of images. # # @param prompt [String] A text description of the desired image(s). The maximum length is 1000 character # - # @param background [Symbol, OpenAI::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageEditParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param mask [Pathname, StringIO, IO, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind + # @param mask [Pathname, StringIO, IO, String, OpenAI::FilePart] An additional image whose fully transparent areas (e.g. where alpha is zero) ind # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. Only `dall-e-2` and `gpt-image-1` are sup # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. 
# - # @param quality [Symbol, OpenAI::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are + # @param quality [Symbol, OpenAI::Models::ImageEditParams::Quality, nil] The quality of the image that will be generated. `high`, `medium` and `low` are # - # @param response_format [Symbol, OpenAI::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] The format in which the generated images are returned. Must be one of `url` or ` # - # @param size [Symbol, OpenAI::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ImagesResponse] + # @return [OpenAI::Models::ImagesResponse] # # @see OpenAI::Models::ImageEditParams def edit(params) @@ -94,31 +94,31 @@ def edit(params) # # @param prompt [String] A text description of the desired image(s). The maximum length is 32000 characte # - # @param background [Symbol, OpenAI::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). + # @param background [Symbol, OpenAI::Models::ImageGenerateParams::Background, nil] Allows to set transparency for the background of the generated image(s). # - # @param model [String, Symbol, OpenAI::ImageModel, nil] The model to use for image generation. One of `dall-e-2`, `dall-e-3`, or `gpt-im + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] The model to use for image generation. 
One of `dall-e-2`, `dall-e-3`, or `gpt-im # - # @param moderation [Symbol, OpenAI::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must + # @param moderation [Symbol, OpenAI::Models::ImageGenerateParams::Moderation, nil] Control the content-moderation level for images generated by `gpt-image-1`. Must # # @param n [Integer, nil] The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only # # @param output_compression [Integer, nil] The compression level (0-100%) for the generated images. This parameter is only # - # @param output_format [Symbol, OpenAI::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su + # @param output_format [Symbol, OpenAI::Models::ImageGenerateParams::OutputFormat, nil] The format in which the generated images are returned. This parameter is only su # - # @param quality [Symbol, OpenAI::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] The quality of the image that will be generated. # - # @param response_format [Symbol, OpenAI::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] The format in which generated images with `dall-e-2` and `dall-e-3` are returned # - # @param size [Symbol, OpenAI::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] The size of the generated images. Must be one of `1024x1024`, `1536x1024` (lands # - # @param style [Symbol, OpenAI::ImageGenerateParams::Style, nil] The style of the generated images. 
This parameter is only supported for `dall-e- + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] The style of the generated images. This parameter is only supported for `dall-e- # # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ImagesResponse] + # @return [OpenAI::Models::ImagesResponse] # # @see OpenAI::Models::ImageGenerateParams def generate(params) diff --git a/lib/openai/resources/models.rb b/lib/openai/resources/models.rb index 1dd26c31..8a3c6bab 100644 --- a/lib/openai/resources/models.rb +++ b/lib/openai/resources/models.rb @@ -12,7 +12,7 @@ class Models # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Model] + # @return [OpenAI::Models::Model] # # @see OpenAI::Models::ModelRetrieveParams def retrieve(model, params = {}) @@ -31,7 +31,7 @@ def retrieve(model, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Page] + # @return [OpenAI::Internal::Page] # # @see OpenAI::Models::ModelListParams def list(params = {}) @@ -53,7 +53,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::ModelDeleted] + # @return [OpenAI::Models::ModelDeleted] # # @see OpenAI::Models::ModelDeleteParams def delete(model, params = {}) diff --git a/lib/openai/resources/moderations.rb b/lib/openai/resources/moderations.rb index 568e03d3..a0b0e774 100644 --- a/lib/openai/resources/moderations.rb +++ b/lib/openai/resources/moderations.rb @@ -11,9 +11,9 @@ class Moderations # # @overload create(input:, model: nil, request_options: {}) # - # @param input [String, Array, Array] Input (or inputs) to classify. 
Can be a single string, an array of strings, or + # @param input [String, Array, Array] Input (or inputs) to classify. Can be a single string, an array of strings, or # - # @param model [String, Symbol, OpenAI::ModerationModel] The content moderation model you would like to use. Learn more in + # @param model [String, Symbol, OpenAI::Models::ModerationModel] The content moderation model you would like to use. Learn more in # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # diff --git a/lib/openai/resources/responses.rb b/lib/openai/resources/responses.rb index c6566956..0b23f4e0 100644 --- a/lib/openai/resources/responses.rb +++ b/lib/openai/resources/responses.rb @@ -23,13 +23,15 @@ class Responses # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. # - # @overload create(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload create(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. 
OpenAI + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently + # @param background [Boolean, nil] Whether to run the model response in the background. + # + # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # @@ -41,29 +43,29 @@ class Responses # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. 
Can be plain # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] # # @see OpenAI::Models::Responses::ResponseCreateParams def create(params) @@ -102,13 +104,15 @@ def stream # [file search](https://platform.openai.com/docs/guides/tools-file-search) to use # your own data as input for the model's response. 
# - # @overload stream_raw(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @overload stream_raw(input:, model:, background: nil, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # + # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. # - # @param input [String, Array] Text, image, or file inputs to the model, used to generate a response. + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI # - # @param model [String, Symbol, OpenAI::ChatModel, OpenAI::ResponsesModel::ResponsesOnlyModel] Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI + # @param background [Boolean, nil] Whether to run the model response in the background. # - # @param include [Array, nil] Specify additional output data to include in the model response. Currently + # @param include [Array, nil] Specify additional output data to include in the model response. Currently # # @param instructions [String, nil] Inserts a system (or developer) message as the first item in the model's context # @@ -120,29 +124,29 @@ def stream # # @param previous_response_id [String, nil] The unique ID of the previous response to the model. 
Use this to # - # @param reasoning [OpenAI::Reasoning, nil] **o-series models only** + # @param reasoning [OpenAI::Models::Reasoning, nil] **o-series models only** # - # @param service_tier [Symbol, OpenAI::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] Specifies the latency tier to use for processing the request. This parameter is # # @param store [Boolean, nil] Whether to store the generated model response for later retrieval via # # @param temperature [Float, nil] What sampling temperature to use, between 0 and 2. Higher values like 0.8 will m # - # @param text [OpenAI::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain + # @param text [OpenAI::Models::Responses::ResponseTextConfig] Configuration options for a text response from the model. Can be plain # - # @param tool_choice [Symbol, OpenAI::Responses::ToolChoiceOptions, OpenAI::Responses::ToolChoiceTypes, OpenAI::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] How the model should select which tool (or tools) to use when generating # - # @param tools [Array] An array of tools the model may call while generating a response. You + # @param tools [Array] An array of tools the model may call while generating a response. You # # @param top_p [Float, nil] An alternative to sampling with temperature, called nucleus sampling, # - # @param truncation [Symbol, OpenAI::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. 
+ # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] The truncation strategy to use for the model response. # - # @param user [String] A unique identifier representing your end-user, which can help OpenAI to monitor + # @param user [String] A stable identifier for your end-users. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::Stream] + # @return [OpenAI::Internal::Stream] # # @see OpenAI::Models::Responses::ResponseCreateParams def stream_raw(params) @@ -172,11 +176,11 @@ def stream_raw(params) # # @param response_id [String] The ID of the response to retrieve. # - # @param include [Array] Additional fields to include in the response. See the `include` + # @param include [Array] Additional fields to include in the response. See the `include` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Responses::Response] + # @return [OpenAI::Models::Responses::Response] # # @see OpenAI::Models::Responses::ResponseRetrieveParams def retrieve(response_id, params = {}) @@ -210,6 +214,28 @@ def delete(response_id, params = {}) ) end + # Cancels a model response with the given ID. Only responses created with the + # `background` parameter set to `true` can be cancelled. + # [Learn more](https://platform.openai.com/docs/guides/background). + # + # @overload cancel(response_id, request_options: {}) + # + # @param response_id [String] The ID of the response to cancel. 
+ # + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] + # + # @return [nil] + # + # @see OpenAI::Models::Responses::ResponseCancelParams + def cancel(response_id, params = {}) + @client.request( + method: :post, + path: ["responses/%1$s/cancel", response_id], + model: NilClass, + options: params[:request_options] + ) + end + # @api private # # @param client [OpenAI::Client] diff --git a/lib/openai/resources/responses/input_items.rb b/lib/openai/resources/responses/input_items.rb index 3cec0416..9b62eaa2 100644 --- a/lib/openai/resources/responses/input_items.rb +++ b/lib/openai/resources/responses/input_items.rb @@ -17,15 +17,15 @@ class InputItems # # @param before [String] An item ID to list items before, used in pagination. # - # @param include [Array] Additional fields to include in the response. See the `include` + # @param include [Array] Additional fields to include in the response. See the `include` # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between # - # @param order [Symbol, OpenAI::Responses::InputItemListParams::Order] The order to return the input items in. Default is `asc`. + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] The order to return the input items in. Default is `desc`. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::Responses::InputItemListParams def list(response_id, params = {}) diff --git a/lib/openai/resources/uploads.rb b/lib/openai/resources/uploads.rb index 363b2e57..65d75883 100644 --- a/lib/openai/resources/uploads.rb +++ b/lib/openai/resources/uploads.rb @@ -37,11 +37,11 @@ class Uploads # # @param mime_type [String] The MIME type of the file. # - # @param purpose [Symbol, OpenAI::FilePurpose] The intended purpose of the uploaded file. 
+ # @param purpose [Symbol, OpenAI::Models::FilePurpose] The intended purpose of the uploaded file. # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Upload] + # @return [OpenAI::Models::Upload] # # @see OpenAI::Models::UploadCreateParams def create(params) @@ -60,7 +60,7 @@ def create(params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Upload] + # @return [OpenAI::Models::Upload] # # @see OpenAI::Models::UploadCancelParams def cancel(upload_id, params = {}) @@ -99,7 +99,7 @@ def cancel(upload_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Upload] + # @return [OpenAI::Models::Upload] # # @see OpenAI::Models::UploadCompleteParams def complete(upload_id, params) diff --git a/lib/openai/resources/uploads/parts.rb b/lib/openai/resources/uploads/parts.rb index 7ad2e042..05f10f4c 100644 --- a/lib/openai/resources/uploads/parts.rb +++ b/lib/openai/resources/uploads/parts.rb @@ -23,11 +23,11 @@ class Parts # # @param upload_id [String] The ID of the Upload. # - # @param data [Pathname, StringIO, IO, OpenAI::FilePart] The chunk of bytes for this Part. + # @param data [Pathname, StringIO, IO, String, OpenAI::FilePart] The chunk of bytes for this Part. 
# # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Uploads::UploadPart] + # @return [OpenAI::Models::Uploads::UploadPart] # # @see OpenAI::Models::Uploads::PartCreateParams def create(upload_id, params) diff --git a/lib/openai/resources/vector_stores.rb b/lib/openai/resources/vector_stores.rb index 4d0470d5..d903b9ef 100644 --- a/lib/openai/resources/vector_stores.rb +++ b/lib/openai/resources/vector_stores.rb @@ -16,9 +16,9 @@ class VectorStores # # @overload create(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # - # @param expires_after [OpenAI::VectorStoreExpirationAfter] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] The expiration policy for a vector store. 
# # @param file_ids [Array] A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # @@ -28,7 +28,7 @@ class VectorStores # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStore] + # @return [OpenAI::Models::VectorStore] # # @see OpenAI::Models::VectorStoreCreateParams def create(params = {}) @@ -50,7 +50,7 @@ def create(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStore] + # @return [OpenAI::Models::VectorStore] # # @see OpenAI::Models::VectorStoreRetrieveParams def retrieve(vector_store_id, params = {}) @@ -71,7 +71,7 @@ def retrieve(vector_store_id, params = {}) # # @param vector_store_id [String] The ID of the vector store to modify. # - # @param expires_after [OpenAI::VectorStoreExpirationAfter, nil] The expiration policy for a vector store. + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] The expiration policy for a vector store. # # @param metadata [Hash{Symbol=>String}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # @@ -79,7 +79,7 @@ def retrieve(vector_store_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStore] + # @return [OpenAI::Models::VectorStore] # # @see OpenAI::Models::VectorStoreUpdateParams def update(vector_store_id, params = {}) @@ -106,11 +106,11 @@ def update(vector_store_id, params = {}) # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStoreListParams def list(params = {}) @@ -133,7 +133,7 @@ def list(params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStoreDeleted] + # @return [OpenAI::Models::VectorStoreDeleted] # # @see OpenAI::Models::VectorStoreDeleteParams def delete(vector_store_id, params = {}) @@ -157,11 +157,11 @@ def delete(vector_store_id, params = {}) # # @param query [String, Array] A query string for a search # - # @param filters [OpenAI::ComparisonFilter, OpenAI::CompoundFilter] A filter to apply based on file attributes. + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] A filter to apply based on file attributes. # # @param max_num_results [Integer] The maximum number of results to return. This number should be between 1 and 50 # - # @param ranking_options [OpenAI::VectorStoreSearchParams::RankingOptions] Ranking options for search. + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] Ranking options for search. # # @param rewrite_query [Boolean] Whether to rewrite the natural language query for vector search. # diff --git a/lib/openai/resources/vector_stores/file_batches.rb b/lib/openai/resources/vector_stores/file_batches.rb index b8b4133c..99d3e7df 100644 --- a/lib/openai/resources/vector_stores/file_batches.rb +++ b/lib/openai/resources/vector_stores/file_batches.rb @@ -17,11 +17,11 @@ class FileBatches # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchCreateParams def create(vector_store_id, params) @@ -45,7 +45,7 @@ def create(vector_store_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchRetrieveParams def retrieve(batch_id, params) @@ -73,7 +73,7 @@ def retrieve(batch_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileBatch] + # @return [OpenAI::Models::VectorStores::VectorStoreFileBatch] # # @see OpenAI::Models::VectorStores::FileBatchCancelParams def cancel(batch_id, params) @@ -105,15 +105,15 @@ def cancel(batch_id, params) # # @param before [String] Query param: A cursor for use in pagination. `before` is an object ID that defin # - # @param filter [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Filter] Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] Query param: Filter by file status. One of `in_progress`, `completed`, `failed`, # # @param limit [Integer] Query param: A limit on the number of objects to be returned. Limit can range be # - # @param order [Symbol, OpenAI::VectorStores::FileBatchListFilesParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. 
`asc` for + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] Query param: Sort order by the `created_at` timestamp of the objects. `asc` for # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileBatchListFilesParams def list_files(batch_id, params) diff --git a/lib/openai/resources/vector_stores/files.rb b/lib/openai/resources/vector_stores/files.rb index 79c76c82..8852aed7 100644 --- a/lib/openai/resources/vector_stores/files.rb +++ b/lib/openai/resources/vector_stores/files.rb @@ -19,11 +19,11 @@ class Files # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] Set of 16 key-value pairs that can be attached to an object. This can be # - # @param chunking_strategy [OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). If not set, will use the `auto` + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFile] + # @return [OpenAI::Models::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileCreateParams def create(vector_store_id, params) @@ -47,7 +47,7 @@ def create(vector_store_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFile] + # @return [OpenAI::Models::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileRetrieveParams def retrieve(file_id, params) @@ -79,7 +79,7 @@ def retrieve(file_id, params) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFile] + # @return [OpenAI::Models::VectorStores::VectorStoreFile] # # @see OpenAI::Models::VectorStores::FileUpdateParams def update(file_id, params) @@ -110,15 +110,15 @@ def update(file_id, params) # # @param before [String] A cursor for use in pagination. `before` is an object ID that defines your place # - # @param filter [Symbol, OpenAI::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @param limit [Integer] A limit on the number of objects to be returned. Limit can range between 1 and 1 # - # @param order [Symbol, OpenAI::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. `asc` for ascending ord + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending ord # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::Internal::CursorPage] + # @return [OpenAI::Internal::CursorPage] # # @see OpenAI::Models::VectorStores::FileListParams def list(vector_store_id, params = {}) @@ -146,7 +146,7 @@ def list(vector_store_id, params = {}) # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}, nil] # - # @return [OpenAI::VectorStores::VectorStoreFileDeleted] + # @return [OpenAI::Models::VectorStores::VectorStoreFileDeleted] # # @see OpenAI::Models::VectorStores::FileDeleteParams def delete(file_id, params) diff --git a/lib/openai/structured_output.rb b/lib/openai/structured_output.rb index c4bbc24c..2c5bf0b3 100644 --- a/lib/openai/structured_output.rb +++ b/lib/openai/structured_output.rb @@ -1,6 +1,7 @@ # frozen_string_literal: true module OpenAI + StructuredOutput = OpenAI::Helpers::StructuredOutput ArrayOf = OpenAI::Helpers::StructuredOutput::ArrayOf BaseModel = OpenAI::Helpers::StructuredOutput::BaseModel Boolean = OpenAI::Helpers::StructuredOutput::Boolean diff --git a/lib/openai/version.rb b/lib/openai/version.rb index c559f45b..5dc9f2b6 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0.pre.beta.1" + VERSION = "0.1.0.pre.beta.2" end diff --git a/rbi/openai/client.rbi b/rbi/openai/client.rbi index b89f1995..fc1f9348 100644 --- a/rbi/openai/client.rbi +++ b/rbi/openai/client.rbi @@ -67,6 +67,9 @@ module OpenAI sig { returns(OpenAI::Resources::Evals) } attr_reader :evals + sig { returns(OpenAI::Resources::Containers) } + attr_reader :containers + # @api private sig { override.returns(T::Hash[String, String]) } private def auth_headers diff --git a/rbi/openai/helpers/structured_output.rbi b/rbi/openai/helpers/structured_output.rbi new file mode 100644 index 00000000..2ebf849e --- /dev/null +++ b/rbi/openai/helpers/structured_output.rbi @@ -0,0 +1,16 @@ 
+# typed: strong + +module OpenAI + module Helpers + # Helpers for the structured output API. + # + # see https://platform.openai.com/docs/guides/structured-outputs + # see https://json-schema.org + # + # Based on the DSL in {OpenAI::Internal::Type}, but currently only support the limited subset of JSON schema types used in structured output APIs. + # + # Supported types: {NilClass} {String} {Symbol} {Integer} {Float} {OpenAI::Boolean}, {OpenAI::EnumOf}, {OpenAI::UnionOf}, {OpenAI::ArrayOf}, {OpenAI::BaseModel} + module StructuredOutput + end + end +end diff --git a/rbi/openai/helpers/structured_output/array_of.rbi b/rbi/openai/helpers/structured_output/array_of.rbi new file mode 100644 index 00000000..b0c70ce6 --- /dev/null +++ b/rbi/openai/helpers/structured_output/array_of.rbi @@ -0,0 +1,16 @@ +# typed: strong + +module OpenAI + module Helpers + module StructuredOutput + class ArrayOf < OpenAI::Internal::Type::ArrayOf + include OpenAI::Helpers::StructuredOutput::JsonSchemaConverter + + Elem = type_member(:out) + + sig { returns(String) } + attr_reader :description + end + end + end +end diff --git a/rbi/openai/helpers/structured_output/base_model.rbi b/rbi/openai/helpers/structured_output/base_model.rbi new file mode 100644 index 00000000..58f44979 --- /dev/null +++ b/rbi/openai/helpers/structured_output/base_model.rbi @@ -0,0 +1,22 @@ +# typed: strong + +module OpenAI + module Helpers + module StructuredOutput + # Represents a response from OpenAI's API where the model's output has been structured according to a schema predefined by the user. + # + # This class is specifically used when making requests with the `response_format` parameter set to use structured output (e.g., JSON). 
+ # + # See {examples/structured_outputs_chat_completions.rb} for a complete example of use + class BaseModel < OpenAI::Internal::Type::BaseModel + extend OpenAI::Helpers::StructuredOutput::JsonSchemaConverter + + class << self + sig { returns(T.noreturn) } + def optional + end + end + end + end + end +end diff --git a/rbi/openai/helpers/structured_output/boolean.rbi b/rbi/openai/helpers/structured_output/boolean.rbi new file mode 100644 index 00000000..2bf57807 --- /dev/null +++ b/rbi/openai/helpers/structured_output/boolean.rbi @@ -0,0 +1,11 @@ +# typed: strong + +module OpenAI + module Helpers + module StructuredOutput + class Boolean < OpenAI::Internal::Type::Boolean + extend OpenAI::Helpers::StructuredOutput::JsonSchemaConverter + end + end + end +end diff --git a/rbi/openai/helpers/structured_output/enum_of.rbi b/rbi/openai/helpers/structured_output/enum_of.rbi new file mode 100644 index 00000000..f3fede55 --- /dev/null +++ b/rbi/openai/helpers/structured_output/enum_of.rbi @@ -0,0 +1,30 @@ +# typed: strong + +module OpenAI + module Helpers + module StructuredOutput + # @example + # example = OpenAI::EnumOf[:foo, :bar, :zoo] + # + # @example + # example = OpenAI::EnumOf[1, 2, 3] + class EnumOf + include OpenAI::Internal::Type::Enum + include OpenAI::Helpers::StructuredOutput::JsonSchemaConverter + + sig do + params( + values: T.any(NilClass, T::Boolean, Integer, Float, Symbol) + ).returns(T.attached_class) + end + def self.[](*values) + end + + sig do + returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) + end + attr_reader :values + end + end + end +end diff --git a/rbi/openai/helpers/structured_output/json_schema_converter.rbi b/rbi/openai/helpers/structured_output/json_schema_converter.rbi new file mode 100644 index 00000000..2d34df01 --- /dev/null +++ b/rbi/openai/helpers/structured_output/json_schema_converter.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Helpers + module StructuredOutput + JsonSchema = T.type_alias { 
OpenAI::Internal::AnyHash } + + # To customize the JSON schema conversion for a type, implement the `JsonSchemaConverter` interface. + module JsonSchemaConverter + POINTER = T.let(Object.new.freeze, T.anything) + COUNTER = T.let(Object.new.freeze, T.anything) + + Input = + T.type_alias do + T.any( + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter, + T::Class[T.anything] + ) + end + State = + T.type_alias do + { defs: T::Hash[Object, String], path: T::Array[String] } + end + + # The exact JSON schema produced is subject to improvement between minor release versions. + sig do + params( + state: OpenAI::Helpers::StructuredOutput::JsonSchemaConverter::State + ).returns(OpenAI::Helpers::StructuredOutput::JsonSchema) + end + def to_json_schema_inner(state:) + end + + # Internal helpers methods. + class << self + # @api private + sig do + params( + schema: OpenAI::Helpers::StructuredOutput::JsonSchema + ).returns(OpenAI::Helpers::StructuredOutput::JsonSchema) + end + def to_nilable(schema) + end + + # @api private + sig do + params( + state: + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter::State, + type: + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter::Input, + blk: T.proc.returns(OpenAI::Helpers::StructuredOutput::JsonSchema) + ).void + end + def cache_def!(state, type:, &blk) + end + + # @api private + sig do + params( + type: + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter::Input + ).returns(OpenAI::Helpers::StructuredOutput::JsonSchema) + end + def to_json_schema(type) + end + + # @api private + sig do + params( + type: + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter::Input, + state: + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter::State + ).returns(OpenAI::Helpers::StructuredOutput::JsonSchema) + end + def to_json_schema_inner(type, state:) + end + end + end + end + end +end diff --git a/rbi/openai/helpers/structured_output/union_of.rbi b/rbi/openai/helpers/structured_output/union_of.rbi new file mode 100644 index 
00000000..bc67b988 --- /dev/null +++ b/rbi/openai/helpers/structured_output/union_of.rbi @@ -0,0 +1,23 @@ +# typed: strong + +module OpenAI + module Helpers + module StructuredOutput + # @example + # example = OpenAI::UnionOf[Float, OpenAI::ArrayOf[Integer]] + class UnionOf + include OpenAI::Internal::Type::Union + include OpenAI::Helpers::StructuredOutput::JsonSchemaConverter + + sig do + params( + variants: + OpenAI::Helpers::StructuredOutput::JsonSchemaConverter::Input + ).returns(T.attached_class) + end + def self.[](*variants) + end + end + end + end +end diff --git a/rbi/openai/internal.rbi b/rbi/openai/internal.rbi index 135f6cba..eeddce6e 100644 --- a/rbi/openai/internal.rbi +++ b/rbi/openai/internal.rbi @@ -8,6 +8,9 @@ module OpenAI # this alias might be refined in the future. AnyHash = T.type_alias { T::Hash[Symbol, T.anything] } + FileInput = + T.type_alias { T.any(Pathname, StringIO, IO, String, OpenAI::FilePart) } + OMIT = T.let(Object.new.freeze, T.anything) end end diff --git a/rbi/openai/internal/type/array_of.rbi b/rbi/openai/internal/type/array_of.rbi index 9cc138b7..28eaab6f 100644 --- a/rbi/openai/internal/type/array_of.rbi +++ b/rbi/openai/internal/type/array_of.rbi @@ -8,6 +8,7 @@ module OpenAI # Array of items of a given type. class ArrayOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport abstract! 
@@ -63,6 +64,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { returns(Elem) } protected def item_type diff --git a/rbi/openai/internal/type/base_model.rbi b/rbi/openai/internal/type/base_model.rbi index 15fabf91..df16ad79 100644 --- a/rbi/openai/internal/type/base_model.rbi +++ b/rbi/openai/internal/type/base_model.rbi @@ -190,6 +190,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end class << self diff --git a/rbi/openai/internal/type/boolean.rbi b/rbi/openai/internal/type/boolean.rbi index b7cc1e3d..73bf95cb 100644 --- a/rbi/openai/internal/type/boolean.rbi +++ b/rbi/openai/internal/type/boolean.rbi @@ -8,6 +8,7 @@ module OpenAI # Ruby has no Boolean class; this is something for models to refer to. class Boolean extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport abstract! 
@@ -43,6 +44,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end end end diff --git a/rbi/openai/internal/type/enum.rbi b/rbi/openai/internal/type/enum.rbi index 1a1c4c47..fa242fb4 100644 --- a/rbi/openai/internal/type/enum.rbi +++ b/rbi/openai/internal/type/enum.rbi @@ -67,6 +67,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { params(depth: Integer).returns(String) } def inspect(depth: 0) diff --git a/rbi/openai/internal/type/file_input.rbi b/rbi/openai/internal/type/file_input.rbi index 19e6c2e1..09dde1b1 100644 --- a/rbi/openai/internal/type/file_input.rbi +++ b/rbi/openai/internal/type/file_input.rbi @@ -47,6 +47,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end end end diff --git a/rbi/openai/internal/type/hash_of.rbi b/rbi/openai/internal/type/hash_of.rbi index 25123f07..d9f9f9ec 100644 --- a/rbi/openai/internal/type/hash_of.rbi +++ b/rbi/openai/internal/type/hash_of.rbi @@ -8,6 +8,7 @@ module OpenAI # Hash of items of a given type. class HashOf include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport abstract! 
@@ -63,6 +64,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { returns(Elem) } protected def item_type diff --git a/rbi/openai/internal/type/union.rbi b/rbi/openai/internal/type/union.rbi index f8598117..7c6958c1 100644 --- a/rbi/openai/internal/type/union.rbi +++ b/rbi/openai/internal/type/union.rbi @@ -101,6 +101,11 @@ module OpenAI def dump(value, state:) end + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + # @api private sig { params(depth: Integer).returns(String) } def inspect(depth: 0) diff --git a/rbi/openai/internal/type/unknown.rbi b/rbi/openai/internal/type/unknown.rbi index 48a18c80..3b5d1139 100644 --- a/rbi/openai/internal/type/unknown.rbi +++ b/rbi/openai/internal/type/unknown.rbi @@ -8,6 +8,7 @@ module OpenAI # When we don't know what to expect for the value. class Unknown extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport abstract! 
@@ -43,6 +44,11 @@ module OpenAI end def dump(value, state:) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end end end end diff --git a/rbi/openai/internal/util.rbi b/rbi/openai/internal/util.rbi index fe1e8cac..ddce5834 100644 --- a/rbi/openai/internal/util.rbi +++ b/rbi/openai/internal/util.rbi @@ -11,6 +11,15 @@ module OpenAI def self.monotonic_secs end + # @api private + sig do + params(ns: T.any(Module, T::Class[T.anything])).returns( + T::Enumerable[T.any(Module, T::Class[T.anything])] + ) + end + def self.walk_namespaces(ns) + end + class << self # @api private sig { returns(String) } @@ -441,10 +450,32 @@ module OpenAI def const_missing(name) end + # @api private + sig { params(name: Symbol).returns(T::Boolean) } + def sorbet_constant_defined?(name) + end + # @api private sig { params(name: Symbol, blk: T.proc.returns(T.anything)).void } def define_sorbet_constant!(name, &blk) end + + # @api private + sig { returns(T.anything) } + def to_sorbet_type + end + + class << self + # @api private + sig do + params( + type: + T.any(OpenAI::Internal::Util::SorbetRuntimeSupport, T.anything) + ).returns(T.anything) + end + def to_sorbet_type(type) + end + end end end end diff --git a/rbi/openai/models.rbi b/rbi/openai/models.rbi index 7de90e20..c4a20f0f 100644 --- a/rbi/openai/models.rbi +++ b/rbi/openai/models.rbi @@ -43,6 +43,16 @@ module OpenAI CompoundFilter = OpenAI::Models::CompoundFilter + ContainerCreateParams = OpenAI::Models::ContainerCreateParams + + ContainerDeleteParams = OpenAI::Models::ContainerDeleteParams + + ContainerListParams = OpenAI::Models::ContainerListParams + + ContainerRetrieveParams = OpenAI::Models::ContainerRetrieveParams + + Containers = OpenAI::Models::Containers + CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse Embedding = OpenAI::Models::Embedding @@ -59,12 +69,8 @@ module OpenAI EvalDeleteParams = OpenAI::Models::EvalDeleteParams - EvalItem = OpenAI::Models::EvalItem - EvalListParams = 
OpenAI::Models::EvalListParams - EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig - EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams Evals = OpenAI::Models::Evals @@ -184,8 +190,6 @@ module OpenAI VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams - VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter - VectorStoreListParams = OpenAI::Models::VectorStoreListParams VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/rbi/openai/models/audio/transcription_create_params.rbi b/rbi/openai/models/audio/transcription_create_params.rbi index e35cfeea..c3dc13df 100644 --- a/rbi/openai/models/audio/transcription_create_params.rbi +++ b/rbi/openai/models/audio/transcription_create_params.rbi @@ -17,7 +17,7 @@ module OpenAI # The audio file object (not file name) to transcribe, in one of these formats: # flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :file # ID of the model to use. 
The options are `gpt-4o-transcribe`, @@ -130,7 +130,7 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( @@ -202,7 +202,7 @@ module OpenAI sig do override.returns( { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( diff --git a/rbi/openai/models/audio/translation_create_params.rbi b/rbi/openai/models/audio/translation_create_params.rbi index bcfb2484..1dc35166 100644 --- a/rbi/openai/models/audio/translation_create_params.rbi +++ b/rbi/openai/models/audio/translation_create_params.rbi @@ -17,7 +17,7 @@ module OpenAI # The audio file object (not file name) translate, in one of these formats: flac, # mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :file # ID of the model to use. Only `whisper-1` (which is powered by our open source @@ -67,7 +67,7 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, model: T.any(String, OpenAI::AudioModel::OrSymbol), prompt: String, response_format: @@ -104,7 +104,7 @@ module OpenAI sig do override.returns( { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, model: T.any(String, OpenAI::AudioModel::OrSymbol), prompt: String, response_format: diff --git a/rbi/openai/models/beta/assistant.rbi b/rbi/openai/models/beta/assistant.rbi index 4b635b82..7842b859 100644 --- a/rbi/openai/models/beta/assistant.rbi +++ b/rbi/openai/models/beta/assistant.rbi @@ -54,17 +54,7 @@ module OpenAI # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. 
Tools can be of types `code_interpreter`, `file_search`, or # `function`. - sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Beta::AssistantTool::Variants]) } attr_accessor :tools # Specifies the format that the model must output. Compatible with @@ -89,14 +79,7 @@ module OpenAI # max context length. sig do returns( - T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) - ) + T.nilable(OpenAI::Beta::AssistantResponseFormatOption::Variants) ) end attr_accessor :response_format @@ -245,22 +228,10 @@ module OpenAI model: String, name: T.nilable(String), object: Symbol, - tools: - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ], + tools: T::Array[OpenAI::Beta::AssistantTool::Variants], response_format: T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) + OpenAI::Beta::AssistantResponseFormatOption::Variants ), temperature: T.nilable(Float), tool_resources: T.nilable(OpenAI::Beta::Assistant::ToolResources), diff --git a/rbi/openai/models/beta/thread_create_and_run_params.rbi b/rbi/openai/models/beta/thread_create_and_run_params.rbi index 153977c8..2a603dd6 100644 --- a/rbi/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/openai/models/beta/thread_create_and_run_params.rbi @@ -188,13 +188,21 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
- sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } + sig do + returns( + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy + ) + ) + end attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash) + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ) ).void end attr_writer :truncation_strategy @@ -242,7 +250,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -382,7 +392,10 @@ module OpenAI ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), + truncation_strategy: + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy + ), request_options: OpenAI::RequestOptions } ) @@ -537,16 +550,7 @@ module OpenAI # The text contents of the message. 
sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::Variants ) end attr_accessor :content @@ -588,16 +592,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: @@ -635,16 +630,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Role::OrSymbol, attachments: @@ -669,11 +655,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end @@ -1450,6 +1432,103 @@ module OpenAI end end end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. 
When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + sig do + returns( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol + ) + end + attr_accessor :type + + # The number of most recent messages from the thread when constructing the context + # for the run. + sig { returns(T.nilable(Integer)) } + attr_accessor :last_messages + + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + sig do + params( + type: + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ).returns(T.attached_class) + end + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ) + end + + sig do + override.returns( + { + type: + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol + ) + LAST_MESSAGES = + T.let( + :last_messages, + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/beta/thread_create_params.rbi b/rbi/openai/models/beta/thread_create_params.rbi index aa3692ba..da3aedab 100644 --- a/rbi/openai/models/beta/thread_create_params.rbi +++ b/rbi/openai/models/beta/thread_create_params.rbi @@ -113,16 +113,7 @@ module OpenAI # The text contents of the message. sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::ThreadCreateParams::Message::Content::Variants ) end attr_accessor :content @@ -160,16 +151,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::ThreadCreateParams::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable( @@ -206,16 +188,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + 
OpenAI::Beta::ThreadCreateParams::Message::Content::Variants, role: OpenAI::Beta::ThreadCreateParams::Message::Role::OrSymbol, attachments: T.nilable( @@ -239,11 +212,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end diff --git a/rbi/openai/models/beta/threads/message.rbi b/rbi/openai/models/beta/threads/message.rbi index 82120763..c9a985aa 100644 --- a/rbi/openai/models/beta/threads/message.rbi +++ b/rbi/openai/models/beta/threads/message.rbi @@ -34,16 +34,7 @@ module OpenAI # The content of the message in array of text and/or images. sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlock, - OpenAI::Beta::Threads::RefusalContentBlock - ) - ] - ) + returns(T::Array[OpenAI::Beta::Threads::MessageContent::Variants]) end attr_accessor :content @@ -194,14 +185,7 @@ module OpenAI ), completed_at: T.nilable(Integer), content: - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlock, - OpenAI::Beta::Threads::RefusalContentBlock - ) - ], + T::Array[OpenAI::Beta::Threads::MessageContent::Variants], created_at: Integer, incomplete_at: T.nilable(Integer), incomplete_details: @@ -239,10 +223,7 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + OpenAI::Beta::Threads::Message::Attachment::Tool::Variants ] ) ) @@ -288,10 +269,7 @@ module OpenAI file_id: String, tools: T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly - ) + 
OpenAI::Beta::Threads::Message::Attachment::Tool::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/message_create_params.rbi b/rbi/openai/models/beta/threads/message_create_params.rbi index 4e6416b0..0fca1da4 100644 --- a/rbi/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/openai/models/beta/threads/message_create_params.rbi @@ -19,16 +19,7 @@ module OpenAI # The text contents of the message. sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants ) end attr_accessor :content @@ -66,16 +57,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants, role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable( @@ -114,16 +96,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants, role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: @@ -149,11 +122,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end diff --git a/rbi/openai/models/beta/threads/message_delta.rbi b/rbi/openai/models/beta/threads/message_delta.rbi index f234666f..7348c453 100644 --- 
a/rbi/openai/models/beta/threads/message_delta.rbi +++ b/rbi/openai/models/beta/threads/message_delta.rbi @@ -17,14 +17,7 @@ module OpenAI sig do returns( T.nilable( - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Beta::Threads::TextDeltaBlock, - OpenAI::Beta::Threads::RefusalDeltaBlock, - OpenAI::Beta::Threads::ImageURLDeltaBlock - ) - ] + T::Array[OpenAI::Beta::Threads::MessageContentDelta::Variants] ) ) end @@ -88,12 +81,7 @@ module OpenAI { content: T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileDeltaBlock, - OpenAI::Beta::Threads::TextDeltaBlock, - OpenAI::Beta::Threads::RefusalDeltaBlock, - OpenAI::Beta::Threads::ImageURLDeltaBlock - ) + OpenAI::Beta::Threads::MessageContentDelta::Variants ], role: OpenAI::Beta::Threads::MessageDelta::Role::TaggedSymbol } diff --git a/rbi/openai/models/beta/threads/run.rbi b/rbi/openai/models/beta/threads/run.rbi index e6879317..7940c801 100644 --- a/rbi/openai/models/beta/threads/run.rbi +++ b/rbi/openai/models/beta/threads/run.rbi @@ -143,14 +143,7 @@ module OpenAI # max context length. sig do returns( - T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) - ) + T.nilable(OpenAI::Beta::AssistantResponseFormatOption::Variants) ) end attr_accessor :response_format @@ -179,12 +172,7 @@ module OpenAI # call that tool. sig do returns( - T.nilable( - T.any( - OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Beta::AssistantToolChoice - ) - ) + T.nilable(OpenAI::Beta::AssistantToolChoiceOption::Variants) ) end attr_accessor :tool_choice @@ -192,28 +180,22 @@ module OpenAI # The list of tools that the # [assistant](https://platform.openai.com/docs/api-reference/assistants) used for # this run. 
- sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Beta::AssistantTool::Variants]) } attr_accessor :tools # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. - sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } + sig do + returns(T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy)) + end attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash) + T.nilable( + OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash + ) ).void end attr_writer :truncation_strategy @@ -291,7 +273,9 @@ module OpenAI ) ], truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::Run::TruncationStrategy::OrHash + ), usage: T.nilable(OpenAI::Beta::Threads::Run::Usage::OrHash), temperature: T.nilable(Float), top_p: T.nilable(Float), @@ -429,32 +413,16 @@ module OpenAI T.nilable(OpenAI::Beta::Threads::Run::RequiredAction), response_format: T.nilable( - T.any( - Symbol, - OpenAI::ResponseFormatText, - OpenAI::ResponseFormatJSONObject, - OpenAI::ResponseFormatJSONSchema - ) + OpenAI::Beta::AssistantResponseFormatOption::Variants ), started_at: T.nilable(Integer), status: OpenAI::Beta::Threads::RunStatus::TaggedSymbol, thread_id: String, tool_choice: - T.nilable( - T.any( - OpenAI::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, - OpenAI::Beta::AssistantToolChoice - ) - ), - tools: - T::Array[ - T.any( - OpenAI::Beta::CodeInterpreterTool, - OpenAI::Beta::FileSearchTool, - OpenAI::Beta::FunctionTool - ) - ], - truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), + T.nilable(OpenAI::Beta::AssistantToolChoiceOption::Variants), + tools: T::Array[OpenAI::Beta::AssistantTool::Variants], + truncation_strategy: + 
T.nilable(OpenAI::Beta::Threads::Run::TruncationStrategy), usage: T.nilable(OpenAI::Beta::Threads::Run::Usage), temperature: T.nilable(Float), top_p: T.nilable(Float) @@ -746,6 +714,103 @@ module OpenAI end end + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::Threads::Run::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + sig do + returns( + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) + end + attr_accessor :type + + # The number of most recent messages from the thread when constructing the context + # for the run. + sig { returns(T.nilable(Integer)) } + attr_accessor :last_messages + + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + sig do + params( + type: + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ).returns(T.attached_class) + end + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. 
+ last_messages: nil + ) + end + + sig do + override.returns( + { + type: + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) + LAST_MESSAGES = + T.let( + :last_messages, + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + class Usage < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/models/beta/threads/run_create_params.rbi b/rbi/openai/models/beta/threads/run_create_params.rbi index e274167c..bb9d6882 100644 --- a/rbi/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/openai/models/beta/threads/run_create_params.rbi @@ -206,13 +206,21 @@ module OpenAI # Controls for how a thread will be truncated prior to the run. Use this to # control the intial context window of the run. 
- sig { returns(T.nilable(OpenAI::Beta::TruncationObject)) } + sig do + returns( + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy + ) + ) + end attr_reader :truncation_strategy sig do params( truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash) + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ) ).void end attr_writer :truncation_strategy @@ -265,7 +273,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -426,7 +436,10 @@ module OpenAI ] ), top_p: T.nilable(Float), - truncation_strategy: T.nilable(OpenAI::Beta::TruncationObject), + truncation_strategy: + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy + ), request_options: OpenAI::RequestOptions } ) @@ -446,16 +459,7 @@ module OpenAI # The text contents of the message. 
sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ) + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::Variants ) end attr_accessor :content @@ -497,16 +501,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::Variants, role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: @@ -544,16 +539,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) - ] - ), + OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Content::Variants, role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Role::OrSymbol, attachments: @@ -578,11 +564,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock, - OpenAI::Beta::Threads::ImageURLContentBlock, - OpenAI::Beta::Threads::TextContentBlockParam - ) + OpenAI::Beta::Threads::MessageContentPartParam::Variants ] ) end @@ -796,6 +778,103 @@ module OpenAI def self.variants end end + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy, + OpenAI::Internal::AnyHash + ) + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. 
When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + sig do + returns( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol + ) + end + attr_accessor :type + + # The number of most recent messages from the thread when constructing the context + # for the run. + sig { returns(T.nilable(Integer)) } + attr_accessor :last_messages + + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + sig do + params( + type: + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + ).returns(T.attached_class) + end + def self.new( + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. + type:, + # The number of most recent messages from the thread when constructing the context + # for the run. + last_messages: nil + ) + end + + sig do + override.returns( + { + type: + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::OrSymbol, + last_messages: T.nilable(Integer) + } + ) + end + def to_hash + end + + # The truncation strategy to use for the thread. The default is `auto`. If set to + # `last_messages`, the thread will be truncated to the n most recent messages in + # the thread. When set to `auto`, messages in the middle of the thread will be + # dropped to fit the context length of the model, `max_prompt_tokens`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ) + LAST_MESSAGES = + T.let( + :last_messages, + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi index 2989b129..1b4be0d3 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi +++ b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call.rbi @@ -91,10 +91,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Variants ] ) end @@ -129,10 +126,7 @@ module OpenAI input: String, outputs: T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image - ) + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi index 028059df..03693d1b 100644 --- a/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi +++ 
b/rbi/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbi @@ -108,10 +108,7 @@ module OpenAI returns( T.nilable( T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output::Variants ] ) ) @@ -160,10 +157,7 @@ module OpenAI input: String, outputs: T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterLogs, - OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage - ) + OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/runs/run_step.rbi b/rbi/openai/models/beta/threads/runs/run_step.rbi index 1204d5fe..1a87ede6 100644 --- a/rbi/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step.rbi @@ -96,10 +96,7 @@ module OpenAI # The details of the run step. sig do returns( - T.any( - OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Beta::Threads::Runs::ToolCallsStepDetails - ) + OpenAI::Beta::Threads::Runs::RunStep::StepDetails::Variants ) end attr_accessor :step_details @@ -228,10 +225,7 @@ module OpenAI status: OpenAI::Beta::Threads::Runs::RunStep::Status::TaggedSymbol, step_details: - T.any( - OpenAI::Beta::Threads::Runs::MessageCreationStepDetails, - OpenAI::Beta::Threads::Runs::ToolCallsStepDetails - ), + OpenAI::Beta::Threads::Runs::RunStep::StepDetails::Variants, thread_id: String, type: OpenAI::Beta::Threads::Runs::RunStep::Type::TaggedSymbol, diff --git a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi index cbe1297d..ba659f76 100644 --- a/rbi/openai/models/beta/threads/runs/run_step_delta.rbi +++ b/rbi/openai/models/beta/threads/runs/run_step_delta.rbi @@ -20,10 +20,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - 
OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Beta::Threads::Runs::ToolCallDeltaObject - ) + OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails::Variants ) ) end @@ -60,10 +57,7 @@ module OpenAI override.returns( { step_details: - T.any( - OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta, - OpenAI::Beta::Threads::Runs::ToolCallDeltaObject - ) + OpenAI::Beta::Threads::Runs::RunStepDelta::StepDetails::Variants } ) end diff --git a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi index e034b7e3..0dcb1dac 100644 --- a/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_call_delta_object.rbi @@ -24,13 +24,7 @@ module OpenAI sig do returns( T.nilable( - T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Beta::Threads::Runs::FunctionToolCallDelta - ) - ] + T::Array[OpenAI::Beta::Threads::Runs::ToolCallDelta::Variants] ) ) end @@ -80,11 +74,7 @@ module OpenAI type: Symbol, tool_calls: T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta, - OpenAI::Beta::Threads::Runs::FileSearchToolCallDelta, - OpenAI::Beta::Threads::Runs::FunctionToolCallDelta - ) + OpenAI::Beta::Threads::Runs::ToolCallDelta::Variants ] } ) diff --git a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi index 61ef18e1..cda8ec9b 100644 --- a/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi +++ b/rbi/openai/models/beta/threads/runs/tool_calls_step_details.rbi @@ -18,15 +18,7 @@ module OpenAI # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. 
sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Beta::Threads::Runs::FunctionToolCall - ) - ] - ) + returns(T::Array[OpenAI::Beta::Threads::Runs::ToolCall::Variants]) end attr_accessor :tool_calls @@ -62,13 +54,7 @@ module OpenAI override.returns( { tool_calls: - T::Array[ - T.any( - OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall, - OpenAI::Beta::Threads::Runs::FileSearchToolCall, - OpenAI::Beta::Threads::Runs::FunctionToolCall - ) - ], + T::Array[OpenAI::Beta::Threads::Runs::ToolCall::Variants], type: Symbol } ) diff --git a/rbi/openai/models/beta/threads/text.rbi b/rbi/openai/models/beta/threads/text.rbi index 25110168..e36d074a 100644 --- a/rbi/openai/models/beta/threads/text.rbi +++ b/rbi/openai/models/beta/threads/text.rbi @@ -10,16 +10,7 @@ module OpenAI T.any(OpenAI::Beta::Threads::Text, OpenAI::Internal::AnyHash) end - sig do - returns( - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationAnnotation, - OpenAI::Beta::Threads::FilePathAnnotation - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Beta::Threads::Annotation::Variants]) } attr_accessor :annotations # The data that makes up the text. 
@@ -49,12 +40,7 @@ module OpenAI override.returns( { annotations: - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationAnnotation, - OpenAI::Beta::Threads::FilePathAnnotation - ) - ], + T::Array[OpenAI::Beta::Threads::Annotation::Variants], value: String } ) diff --git a/rbi/openai/models/beta/threads/text_delta.rbi b/rbi/openai/models/beta/threads/text_delta.rbi index 546b0523..f784ef87 100644 --- a/rbi/openai/models/beta/threads/text_delta.rbi +++ b/rbi/openai/models/beta/threads/text_delta.rbi @@ -13,12 +13,7 @@ module OpenAI sig do returns( T.nilable( - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Beta::Threads::FilePathDeltaAnnotation - ) - ] + T::Array[OpenAI::Beta::Threads::AnnotationDelta::Variants] ) ) end @@ -67,12 +62,7 @@ module OpenAI override.returns( { annotations: - T::Array[ - T.any( - OpenAI::Beta::Threads::FileCitationDeltaAnnotation, - OpenAI::Beta::Threads::FilePathDeltaAnnotation - ) - ], + T::Array[OpenAI::Beta::Threads::AnnotationDelta::Variants], value: String } ) diff --git a/rbi/openai/models/beta/truncation_object.rbi b/rbi/openai/models/beta/truncation_object.rbi deleted file mode 100644 index c763ead6..00000000 --- a/rbi/openai/models/beta/truncation_object.rbi +++ /dev/null @@ -1,85 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Beta - class TruncationObject < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any(OpenAI::Beta::TruncationObject, OpenAI::Internal::AnyHash) - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. 
- sig { returns(OpenAI::Beta::TruncationObject::Type::OrSymbol) } - attr_accessor :type - - # The number of most recent messages from the thread when constructing the context - # for the run. - sig { returns(T.nilable(Integer)) } - attr_accessor :last_messages - - # Controls for how a thread will be truncated prior to the run. Use this to - # control the intial context window of the run. - sig do - params( - type: OpenAI::Beta::TruncationObject::Type::OrSymbol, - last_messages: T.nilable(Integer) - ).returns(T.attached_class) - end - def self.new( - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. - type:, - # The number of most recent messages from the thread when constructing the context - # for the run. - last_messages: nil - ) - end - - sig do - override.returns( - { - type: OpenAI::Beta::TruncationObject::Type::OrSymbol, - last_messages: T.nilable(Integer) - } - ) - end - def to_hash - end - - # The truncation strategy to use for the thread. The default is `auto`. If set to - # `last_messages`, the thread will be truncated to the n most recent messages in - # the thread. When set to `auto`, messages in the middle of the thread will be - # dropped to fit the context length of the model, `max_prompt_tokens`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias { T.all(Symbol, OpenAI::Beta::TruncationObject::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - AUTO = - T.let(:auto, OpenAI::Beta::TruncationObject::Type::TaggedSymbol) - LAST_MESSAGES = - T.let( - :last_messages, - OpenAI::Beta::TruncationObject::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[OpenAI::Beta::TruncationObject::Type::TaggedSymbol] - ) - end - def self.values - end - end - end - end - end -end diff --git a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi index 2e10edd1..b2233b53 100644 --- a/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_assistant_message_param.rbi @@ -43,15 +43,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) - ] - ) + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::Variants ) ) end @@ -115,15 +107,7 @@ module OpenAI ), content: T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText::OrHash, - OpenAI::Chat::ChatCompletionContentPartRefusal::OrHash - ) - ] - ) + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::Variants ), function_call: T.nilable( @@ -168,15 +152,7 @@ module OpenAI ), content: T.nilable( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) - ] - ) + OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::Variants ), function_call: T.nilable( @@ -228,10 +204,7 @@ module OpenAI T.any( String, T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartRefusal - ) + 
OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart::Variants ] ) end diff --git a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi index 8326b4e7..79d40f0c 100644 --- a/rbi/openai/models/chat/chat_completion_developer_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_developer_message_param.rbi @@ -18,7 +18,7 @@ module OpenAI # The contents of the developer message. sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::Variants ) end attr_accessor :content @@ -41,10 +41,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::Variants, name: String, role: Symbol ).returns(T.attached_class) @@ -64,10 +61,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionDeveloperMessageParam::Content::Variants, role: Symbol, name: String } diff --git a/rbi/openai/models/chat/chat_completion_message.rbi b/rbi/openai/models/chat/chat_completion_message.rbi index 85e74838..99aa4116 100644 --- a/rbi/openai/models/chat/chat_completion_message.rbi +++ b/rbi/openai/models/chat/chat_completion_message.rbi @@ -18,6 +18,10 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :content + # The parsed contents of the message, if JSON schema is specified. + sig { returns(T.nilable(T.anything)) } + attr_accessor :parsed + # The refusal message generated by the model. 
sig { returns(T.nilable(String)) } attr_accessor :refusal diff --git a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi index 0e512346..97da5d08 100644 --- a/rbi/openai/models/chat/chat_completion_message_tool_call.rbi +++ b/rbi/openai/models/chat/chat_completion_message_tool_call.rbi @@ -80,6 +80,10 @@ module OpenAI sig { returns(String) } attr_accessor :arguments + # The parsed contents of the arguments. + sig { returns(T.anything) } + attr_accessor :parsed + # The name of the function to call. sig { returns(String) } attr_accessor :name diff --git a/rbi/openai/models/chat/chat_completion_prediction_content.rbi b/rbi/openai/models/chat/chat_completion_prediction_content.rbi index 3e8b7a9c..e6fde114 100644 --- a/rbi/openai/models/chat/chat_completion_prediction_content.rbi +++ b/rbi/openai/models/chat/chat_completion_prediction_content.rbi @@ -19,7 +19,7 @@ module OpenAI # returned much more quickly. sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionPredictionContent::Content::Variants ) end attr_accessor :content @@ -34,10 +34,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionPredictionContent::Content::Variants, type: Symbol ).returns(T.attached_class) end @@ -56,10 +53,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionPredictionContent::Content::Variants, type: Symbol } ) diff --git a/rbi/openai/models/chat/chat_completion_system_message_param.rbi b/rbi/openai/models/chat/chat_completion_system_message_param.rbi index bd7d25b4..9a4f0597 100644 --- a/rbi/openai/models/chat/chat_completion_system_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_system_message_param.rbi @@ -17,7 +17,7 @@ module 
OpenAI # The contents of the system message. sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::Variants ) end attr_accessor :content @@ -40,10 +40,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::Variants, name: String, role: Symbol ).returns(T.attached_class) @@ -63,10 +60,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionSystemMessageParam::Content::Variants, role: Symbol, name: String } diff --git a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi index 6aab530f..c0ba7e87 100644 --- a/rbi/openai/models/chat/chat_completion_tool_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_tool_message_param.rbi @@ -17,7 +17,7 @@ module OpenAI # The contents of the tool message. 
sig do returns( - T.any(String, T::Array[OpenAI::Chat::ChatCompletionContentPartText]) + OpenAI::Chat::ChatCompletionToolMessageParam::Content::Variants ) end attr_accessor :content @@ -33,10 +33,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText::OrHash] - ), + OpenAI::Chat::ChatCompletionToolMessageParam::Content::Variants, tool_call_id: String, role: Symbol ).returns(T.attached_class) @@ -55,10 +52,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[OpenAI::Chat::ChatCompletionContentPartText] - ), + OpenAI::Chat::ChatCompletionToolMessageParam::Content::Variants, role: Symbol, tool_call_id: String } diff --git a/rbi/openai/models/chat/chat_completion_user_message_param.rbi b/rbi/openai/models/chat/chat_completion_user_message_param.rbi index d53e5739..f0fdf008 100644 --- a/rbi/openai/models/chat/chat_completion_user_message_param.rbi +++ b/rbi/openai/models/chat/chat_completion_user_message_param.rbi @@ -17,17 +17,7 @@ module OpenAI # The contents of the user message. 
sig do returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] - ) + OpenAI::Chat::ChatCompletionUserMessageParam::Content::Variants ) end attr_accessor :content @@ -49,17 +39,7 @@ module OpenAI sig do params( content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText::OrHash, - OpenAI::Chat::ChatCompletionContentPartImage::OrHash, - OpenAI::Chat::ChatCompletionContentPartInputAudio::OrHash, - OpenAI::Chat::ChatCompletionContentPart::File::OrHash - ) - ] - ), + OpenAI::Chat::ChatCompletionUserMessageParam::Content::Variants, name: String, role: Symbol ).returns(T.attached_class) @@ -79,17 +59,7 @@ module OpenAI override.returns( { content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] - ), + OpenAI::Chat::ChatCompletionUserMessageParam::Content::Variants, role: Symbol, name: String } @@ -106,14 +76,7 @@ module OpenAI T.type_alias do T.any( String, - T::Array[ - T.any( - OpenAI::Chat::ChatCompletionContentPartText, - OpenAI::Chat::ChatCompletionContentPartImage, - OpenAI::Chat::ChatCompletionContentPartInputAudio, - OpenAI::Chat::ChatCompletionContentPart::File - ) - ] + T::Array[OpenAI::Chat::ChatCompletionContentPart::Variants] ) end diff --git a/rbi/openai/models/chat/completion_create_params.rbi b/rbi/openai/models/chat/completion_create_params.rbi index a0f4b474..47e3715b 100644 --- a/rbi/openai/models/chat/completion_create_params.rbi +++ b/rbi/openai/models/chat/completion_create_params.rbi @@ -241,6 +241,7 @@ module OpenAI T.any( OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, + 
OpenAI::StructuredOutput::JsonSchemaConverter, OpenAI::ResponseFormatJSONObject ) ) @@ -254,6 +255,7 @@ module OpenAI T.any( OpenAI::ResponseFormatText::OrHash, OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter, OpenAI::ResponseFormatJSONObject::OrHash ) ).void @@ -298,7 +300,11 @@ module OpenAI # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - sig { returns(T.nilable(T.any(String, T::Array[String]))) } + sig do + returns( + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants) + ) + end attr_accessor :stop # Whether or not to store the output of this chat completion request for use in @@ -365,7 +371,15 @@ module OpenAI attr_reader :tools sig do - params(tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash]).void + params( + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionTool::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter + ) + ] + ).void end attr_writer :tools @@ -383,8 +397,8 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :top_p - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
sig { returns(T.nilable(String)) } attr_reader :user @@ -454,6 +468,7 @@ module OpenAI T.any( OpenAI::ResponseFormatText::OrHash, OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter, OpenAI::ResponseFormatJSONObject::OrHash ), seed: T.nilable(Integer), @@ -461,7 +476,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), @@ -471,7 +487,13 @@ module OpenAI OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash ), - tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionTool::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter + ) + ], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, @@ -661,8 +683,8 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. 
Learn more @@ -725,7 +747,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), @@ -735,7 +758,13 @@ module OpenAI OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Chat::ChatCompletionNamedToolChoice ), - tools: T::Array[OpenAI::Chat::ChatCompletionTool], + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionTool, + OpenAI::StructuredOutput::JsonSchemaConverter + ) + ], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, @@ -963,6 +992,7 @@ module OpenAI T.any( OpenAI::ResponseFormatText, OpenAI::ResponseFormatJSONSchema, + OpenAI::StructuredOutput::JsonSchemaConverter, OpenAI::ResponseFormatJSONObject ) end diff --git a/rbi/openai/models/chat_model.rbi b/rbi/openai/models/chat_model.rbi index 106901df..5046338e 100644 --- a/rbi/openai/models/chat_model.rbi +++ b/rbi/openai/models/chat_model.rbi @@ -75,6 +75,8 @@ module OpenAI ) CHATGPT_4O_LATEST = T.let(:"chatgpt-4o-latest", OpenAI::ChatModel::TaggedSymbol) + CODEX_MINI_LATEST = + T.let(:"codex-mini-latest", OpenAI::ChatModel::TaggedSymbol) GPT_4O_MINI = T.let(:"gpt-4o-mini", OpenAI::ChatModel::TaggedSymbol) GPT_4O_MINI_2024_07_18 = T.let(:"gpt-4o-mini-2024-07-18", OpenAI::ChatModel::TaggedSymbol) diff --git a/rbi/openai/models/comparison_filter.rbi b/rbi/openai/models/comparison_filter.rbi index c18ff737..9ae08eed 100644 --- a/rbi/openai/models/comparison_filter.rbi +++ b/rbi/openai/models/comparison_filter.rbi @@ -25,7 +25,7 @@ module OpenAI # The value to compare against the attribute key; supports string, number, or # boolean types. 
- sig { returns(T.any(String, Float, T::Boolean)) } + sig { returns(OpenAI::ComparisonFilter::Value::Variants) } attr_accessor :value # A filter used to compare a specified attribute key to a given value using a @@ -34,7 +34,7 @@ module OpenAI params( key: String, type: OpenAI::ComparisonFilter::Type::OrSymbol, - value: T.any(String, Float, T::Boolean) + value: OpenAI::ComparisonFilter::Value::Variants ).returns(T.attached_class) end def self.new( @@ -60,7 +60,7 @@ module OpenAI { key: String, type: OpenAI::ComparisonFilter::Type::OrSymbol, - value: T.any(String, Float, T::Boolean) + value: OpenAI::ComparisonFilter::Value::Variants } ) end diff --git a/rbi/openai/models/completion_create_params.rbi b/rbi/openai/models/completion_create_params.rbi index 4f9c3b9c..6a8a8a87 100644 --- a/rbi/openai/models/completion_create_params.rbi +++ b/rbi/openai/models/completion_create_params.rbi @@ -28,16 +28,7 @@ module OpenAI # training, so if a prompt is not specified the model will generate as if from the # beginning of a new document. sig do - returns( - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ) - ) + returns(T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants)) end attr_accessor :prompt @@ -128,7 +119,7 @@ module OpenAI # # Up to 4 sequences where the API will stop generating further tokens. The # returned text will not contain the stop sequence. - sig { returns(T.nilable(T.any(String, T::Array[String]))) } + sig { returns(T.nilable(OpenAI::CompletionCreateParams::Stop::Variants)) } attr_accessor :stop # Options for streaming response. Only set this when you set `stream: true`. 
@@ -177,15 +168,7 @@ module OpenAI sig do params( model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -195,7 +178,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), @@ -320,15 +303,7 @@ module OpenAI { model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -338,7 +313,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions), suffix: T.nilable(String), diff --git a/rbi/openai/models/container_create_params.rbi b/rbi/openai/models/container_create_params.rbi new file mode 100644 index 00000000..ad4342cb --- /dev/null +++ b/rbi/openai/models/container_create_params.rbi @@ -0,0 +1,145 @@ +# typed: strong + +module OpenAI + module Models + class ContainerCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + 
T.any(OpenAI::ContainerCreateParams, OpenAI::Internal::AnyHash) + end + + # Name of the container to create. + sig { returns(String) } + attr_accessor :name + + # Container expiration time in seconds relative to the 'anchor' time. + sig { returns(T.nilable(OpenAI::ContainerCreateParams::ExpiresAfter)) } + attr_reader :expires_after + + sig do + params( + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + # IDs of files to copy to the container. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :file_ids + + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids + + sig do + params( + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter::OrHash, + file_ids: T::Array[String], + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # Name of the container to create. + name:, + # Container expiration time in seconds relative to the 'anchor' time. + expires_after: nil, + # IDs of files to copy to the container. + file_ids: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + file_ids: T::Array[String], + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::ContainerCreateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Time anchor for the expiration time. Currently only 'last_active_at' is + # supported. + sig do + returns(OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::OrSymbol) + end + attr_accessor :anchor + + sig { returns(Integer) } + attr_accessor :minutes + + # Container expiration time in seconds relative to the 'anchor' time. 
+ sig do + params( + anchor: + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # Time anchor for the expiration time. Currently only 'last_active_at' is + # supported. + anchor:, + minutes: + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # Time anchor for the expiration time. Currently only 'last_active_at' is + # supported. + module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::ContainerCreateParams::ExpiresAfter::Anchor) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::ContainerCreateParams::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/container_create_response.rbi b/rbi/openai/models/container_create_response.rbi new file mode 100644 index 00000000..4552eb5f --- /dev/null +++ b/rbi/openai/models/container_create_response.rbi @@ -0,0 +1,192 @@ +# typed: strong + +module OpenAI + module Models + class ContainerCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerCreateResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the container. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the container was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Name of the container. + sig { returns(String) } + attr_accessor :name + + # The type of this object. + sig { returns(String) } + attr_accessor :object + + # Status of the container (e.g., active, deleted). 
+ sig { returns(String) } + attr_accessor :status + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + returns( + T.nilable(OpenAI::Models::ContainerCreateResponse::ExpiresAfter) + ) + end + attr_reader :expires_after + + sig do + params( + expires_after: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + sig do + params( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::OrHash + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the container. + id:, + # Unix timestamp (in seconds) when the container was created. + created_at:, + # Name of the container. + name:, + # The type of this object. + object:, + # Status of the container (e.g., active, deleted). + status:, + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + expires_after: nil + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # The reference point for the expiration. 
+ sig do + returns( + T.nilable( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + ) + end + attr_reader :anchor + + sig do + params( + anchor: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::OrSymbol + ).void + end + attr_writer :anchor + + # The number of minutes after the anchor before the container expires. + sig { returns(T.nilable(Integer)) } + attr_reader :minutes + + sig { params(minutes: Integer).void } + attr_writer :minutes + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + params( + anchor: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # The reference point for the expiration. + anchor: nil, + # The number of minutes after the anchor before the container expires. + minutes: nil + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # The reference point for the expiration. 
+ module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/container_delete_params.rbi b/rbi/openai/models/container_delete_params.rbi new file mode 100644 index 00000000..85d0c862 --- /dev/null +++ b/rbi/openai/models/container_delete_params.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Models + class ContainerDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::ContainerDeleteParams, OpenAI::Internal::AnyHash) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end +end diff --git a/rbi/openai/models/container_list_params.rbi b/rbi/openai/models/container_list_params.rbi new file mode 100644 index 00000000..3f3ebd63 --- /dev/null +++ b/rbi/openai/models/container_list_params.rbi @@ -0,0 +1,99 @@ +# typed: strong + +module OpenAI + module Models + class ContainerListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::ContainerListParams, OpenAI::Internal::AnyHash) + end + + # A cursor for use in pagination. 
`after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + sig { returns(T.nilable(OpenAI::ContainerListParams::Order::OrSymbol)) } + attr_reader :order + + sig { params(order: OpenAI::ContainerListParams::Order::OrSymbol).void } + attr_writer :order + + sig do + params( + after: String, + limit: Integer, + order: OpenAI::ContainerListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. 
+ order: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + after: String, + limit: Integer, + order: OpenAI::ContainerListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias { T.all(Symbol, OpenAI::ContainerListParams::Order) } + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ASC = T.let(:asc, OpenAI::ContainerListParams::Order::TaggedSymbol) + DESC = T.let(:desc, OpenAI::ContainerListParams::Order::TaggedSymbol) + + sig do + override.returns( + T::Array[OpenAI::ContainerListParams::Order::TaggedSymbol] + ) + end + def self.values + end + end + end + end +end diff --git a/rbi/openai/models/container_list_response.rbi b/rbi/openai/models/container_list_response.rbi new file mode 100644 index 00000000..4431fa79 --- /dev/null +++ b/rbi/openai/models/container_list_response.rbi @@ -0,0 +1,190 @@ +# typed: strong + +module OpenAI + module Models + class ContainerListResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerListResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the container. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the container was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Name of the container. + sig { returns(String) } + attr_accessor :name + + # The type of this object. + sig { returns(String) } + attr_accessor :object + + # Status of the container (e.g., active, deleted). + sig { returns(String) } + attr_accessor :status + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. 
The minutes is the number of minutes after the anchor + # before the container expires. + sig do + returns(T.nilable(OpenAI::Models::ContainerListResponse::ExpiresAfter)) + end + attr_reader :expires_after + + sig do + params( + expires_after: + OpenAI::Models::ContainerListResponse::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + sig do + params( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerListResponse::ExpiresAfter::OrHash + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the container. + id:, + # Unix timestamp (in seconds) when the container was created. + created_at:, + # Name of the container. + name:, + # The type of this object. + object:, + # Status of the container (e.g., active, deleted). + status:, + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + expires_after: nil + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerListResponse::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # The reference point for the expiration. + sig do + returns( + T.nilable( + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + ) + end + attr_reader :anchor + + sig do + params( + anchor: + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::OrSymbol + ).void + end + attr_writer :anchor + + # The number of minutes after the anchor before the container expires. 
+ sig { returns(T.nilable(Integer)) } + attr_reader :minutes + + sig { params(minutes: Integer).void } + attr_writer :minutes + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + params( + anchor: + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # The reference point for the expiration. + anchor: nil, + # The number of minutes after the anchor before the container expires. + minutes: nil + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # The reference point for the expiration. + module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::ContainerListResponse::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/container_retrieve_params.rbi b/rbi/openai/models/container_retrieve_params.rbi new file mode 100644 index 00000000..6c987580 --- /dev/null +++ b/rbi/openai/models/container_retrieve_params.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Models + class ContainerRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::ContainerRetrieveParams, 
OpenAI::Internal::AnyHash) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end +end diff --git a/rbi/openai/models/container_retrieve_response.rbi b/rbi/openai/models/container_retrieve_response.rbi new file mode 100644 index 00000000..a81e2d6a --- /dev/null +++ b/rbi/openai/models/container_retrieve_response.rbi @@ -0,0 +1,193 @@ +# typed: strong + +module OpenAI + module Models + class ContainerRetrieveResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerRetrieveResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the container. + sig { returns(String) } + attr_accessor :id + + # Unix timestamp (in seconds) when the container was created. + sig { returns(Integer) } + attr_accessor :created_at + + # Name of the container. + sig { returns(String) } + attr_accessor :name + + # The type of this object. + sig { returns(String) } + attr_accessor :object + + # Status of the container (e.g., active, deleted). + sig { returns(String) } + attr_accessor :status + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. 
+ sig do + returns( + T.nilable(OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter) + ) + end + attr_reader :expires_after + + sig do + params( + expires_after: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::OrHash + ).void + end + attr_writer :expires_after + + sig do + params( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::OrHash + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the container. + id:, + # Unix timestamp (in seconds) when the container was created. + created_at:, + # Name of the container. + name:, + # The type of this object. + object:, + # Status of the container (e.g., active, deleted). + status:, + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + expires_after: nil + ) + end + + sig do + override.returns( + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + } + ) + end + def to_hash + end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # The reference point for the expiration. + sig do + returns( + T.nilable( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + ) + end + attr_reader :anchor + + sig do + params( + anchor: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::OrSymbol + ).void + end + attr_writer :anchor + + # The number of minutes after the anchor before the container expires. 
+ sig { returns(T.nilable(Integer)) } + attr_reader :minutes + + sig { params(minutes: Integer).void } + attr_writer :minutes + + # The container will expire after this time period. The anchor is the reference + # point for the expiration. The minutes is the number of minutes after the anchor + # before the container expires. + sig do + params( + anchor: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::OrSymbol, + minutes: Integer + ).returns(T.attached_class) + end + def self.new( + # The reference point for the expiration. + anchor: nil, + # The number of minutes after the anchor before the container expires. + minutes: nil + ) + end + + sig do + override.returns( + { + anchor: + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol, + minutes: Integer + } + ) + end + def to_hash + end + + # The reference point for the expiration. + module Anchor + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LAST_ACTIVE_AT = + T.let( + :last_active_at, + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::Anchor::TaggedSymbol + ] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_create_params.rbi b/rbi/openai/models/containers/file_create_params.rbi new file mode 100644 index 00000000..4d752f50 --- /dev/null +++ b/rbi/openai/models/containers/file_create_params.rbi @@ -0,0 +1,62 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + 
OpenAI::Containers::FileCreateParams, + OpenAI::Internal::AnyHash + ) + end + + # The File object (not file name) to be uploaded. + sig { returns(T.nilable(OpenAI::Internal::FileInput)) } + attr_reader :file + + sig { params(file: OpenAI::Internal::FileInput).void } + attr_writer :file + + # Name of the file to create. + sig { returns(T.nilable(String)) } + attr_reader :file_id + + sig { params(file_id: String).void } + attr_writer :file_id + + sig do + params( + file: OpenAI::Internal::FileInput, + file_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # The File object (not file name) to be uploaded. + file: nil, + # Name of the file to create. + file_id: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + file: OpenAI::Internal::FileInput, + file_id: String, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_create_response.rbi b/rbi/openai/models/containers/file_create_response.rbi new file mode 100644 index 00000000..c39f81c6 --- /dev/null +++ b/rbi/openai/models/containers/file_create_response.rbi @@ -0,0 +1,90 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileCreateResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Containers::FileCreateResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the file. + sig { returns(String) } + attr_accessor :id + + # Size of the file in bytes. + sig { returns(Integer) } + attr_accessor :bytes + + # The container this file belongs to. + sig { returns(String) } + attr_accessor :container_id + + # Unix timestamp (in seconds) when the file was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The type of this object (`container.file`). + sig { returns(Symbol) } + attr_accessor :object + + # Path of the file in the container. 
+ sig { returns(String) } + attr_accessor :path + + # Source of the file (e.g., `user`, `assistant`). + sig { returns(String) } + attr_accessor :source + + sig do + params( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the file. + id:, + # Size of the file in bytes. + bytes:, + # The container this file belongs to. + container_id:, + # Unix timestamp (in seconds) when the file was created. + created_at:, + # Path of the file in the container. + path:, + # Source of the file (e.g., `user`, `assistant`). + source:, + # The type of this object (`container.file`). + object: :"container.file" + ) + end + + sig do + override.returns( + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_delete_params.rbi b/rbi/openai/models/containers/file_delete_params.rbi new file mode 100644 index 00000000..42830150 --- /dev/null +++ b/rbi/openai/models/containers/file_delete_params.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Containers::FileDeleteParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :container_id + + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(container_id:, request_options: {}) + end + + sig do + override.returns( + { container_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end + end + end + end 
+end diff --git a/rbi/openai/models/containers/file_list_params.rbi b/rbi/openai/models/containers/file_list_params.rbi new file mode 100644 index 00000000..f62afa88 --- /dev/null +++ b/rbi/openai/models/containers/file_list_params.rbi @@ -0,0 +1,116 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any(OpenAI::Containers::FileListParams, OpenAI::Internal::AnyHash) + end + + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + sig { returns(T.nilable(String)) } + attr_reader :after + + sig { params(after: String).void } + attr_writer :after + + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + sig { returns(T.nilable(Integer)) } + attr_reader :limit + + sig { params(limit: Integer).void } + attr_writer :limit + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + sig do + returns( + T.nilable(OpenAI::Containers::FileListParams::Order::OrSymbol) + ) + end + attr_reader :order + + sig do + params( + order: OpenAI::Containers::FileListParams::Order::OrSymbol + ).void + end + attr_writer :order + + sig do + params( + after: String, + limit: Integer, + order: OpenAI::Containers::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. 
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) + end + + sig do + override.returns( + { + after: String, + limit: Integer, + order: OpenAI::Containers::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions + } + ) + end + def to_hash + end + + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + module Order + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Containers::FileListParams::Order) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ASC = + T.let(:asc, OpenAI::Containers::FileListParams::Order::TaggedSymbol) + DESC = + T.let( + :desc, + OpenAI::Containers::FileListParams::Order::TaggedSymbol + ) + + sig do + override.returns( + T::Array[OpenAI::Containers::FileListParams::Order::TaggedSymbol] + ) + end + def self.values + end + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_list_response.rbi b/rbi/openai/models/containers/file_list_response.rbi new file mode 100644 index 00000000..f33ad693 --- /dev/null +++ b/rbi/openai/models/containers/file_list_response.rbi @@ -0,0 +1,90 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileListResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Containers::FileListResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the file. + sig { returns(String) } + attr_accessor :id + + # Size of the file in bytes. 
+ sig { returns(Integer) } + attr_accessor :bytes + + # The container this file belongs to. + sig { returns(String) } + attr_accessor :container_id + + # Unix timestamp (in seconds) when the file was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The type of this object (`container.file`). + sig { returns(Symbol) } + attr_accessor :object + + # Path of the file in the container. + sig { returns(String) } + attr_accessor :path + + # Source of the file (e.g., `user`, `assistant`). + sig { returns(String) } + attr_accessor :source + + sig do + params( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the file. + id:, + # Size of the file in bytes. + bytes:, + # The container this file belongs to. + container_id:, + # Unix timestamp (in seconds) when the file was created. + created_at:, + # Path of the file in the container. + path:, + # Source of the file (e.g., `user`, `assistant`). + source:, + # The type of this object (`container.file`). 
+ object: :"container.file" + ) + end + + sig do + override.returns( + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_retrieve_params.rbi b/rbi/openai/models/containers/file_retrieve_params.rbi new file mode 100644 index 00000000..0e9bfd6e --- /dev/null +++ b/rbi/openai/models/containers/file_retrieve_params.rbi @@ -0,0 +1,40 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Containers::FileRetrieveParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :container_id + + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(container_id:, request_options: {}) + end + + sig do + override.returns( + { container_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/file_retrieve_response.rbi b/rbi/openai/models/containers/file_retrieve_response.rbi new file mode 100644 index 00000000..eea83ee4 --- /dev/null +++ b/rbi/openai/models/containers/file_retrieve_response.rbi @@ -0,0 +1,90 @@ +# typed: strong + +module OpenAI + module Models + module Containers + class FileRetrieveResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Containers::FileRetrieveResponse, + OpenAI::Internal::AnyHash + ) + end + + # Unique identifier for the file. + sig { returns(String) } + attr_accessor :id + + # Size of the file in bytes. 
+ sig { returns(Integer) } + attr_accessor :bytes + + # The container this file belongs to. + sig { returns(String) } + attr_accessor :container_id + + # Unix timestamp (in seconds) when the file was created. + sig { returns(Integer) } + attr_accessor :created_at + + # The type of this object (`container.file`). + sig { returns(Symbol) } + attr_accessor :object + + # Path of the file in the container. + sig { returns(String) } + attr_accessor :path + + # Source of the file (e.g., `user`, `assistant`). + sig { returns(String) } + attr_accessor :source + + sig do + params( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + object: Symbol + ).returns(T.attached_class) + end + def self.new( + # Unique identifier for the file. + id:, + # Size of the file in bytes. + bytes:, + # The container this file belongs to. + container_id:, + # Unix timestamp (in seconds) when the file was created. + created_at:, + # Path of the file in the container. + path:, + # Source of the file (e.g., `user`, `assistant`). + source:, + # The type of this object (`container.file`). 
+ object: :"container.file" + ) + end + + sig do + override.returns( + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/containers/files/content_retrieve_params.rbi b/rbi/openai/models/containers/files/content_retrieve_params.rbi new file mode 100644 index 00000000..76a5f6a1 --- /dev/null +++ b/rbi/openai/models/containers/files/content_retrieve_params.rbi @@ -0,0 +1,42 @@ +# typed: strong + +module OpenAI + module Models + module Containers + module Files + class ContentRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Containers::Files::ContentRetrieveParams, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(String) } + attr_accessor :container_id + + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(T.attached_class) + end + def self.new(container_id:, request_options: {}) + end + + sig do + override.returns( + { container_id: String, request_options: OpenAI::RequestOptions } + ) + end + def to_hash + end + end + end + end + end +end diff --git a/rbi/openai/models/embedding_create_params.rbi b/rbi/openai/models/embedding_create_params.rbi index 147292fe..9d60fac5 100644 --- a/rbi/openai/models/embedding_create_params.rbi +++ b/rbi/openai/models/embedding_create_params.rbi @@ -20,16 +20,7 @@ module OpenAI # for counting tokens. In addition to the per-input token limit, all embedding # models enforce a maximum of 300,000 tokens summed across all inputs in a single # request. 
- sig do - returns( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ) - end + sig { returns(OpenAI::EmbeddingCreateParams::Input::Variants) } attr_accessor :input # ID of the model to use. You can use the @@ -76,13 +67,7 @@ module OpenAI sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ), + input: OpenAI::EmbeddingCreateParams::Input::Variants, model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, encoding_format: @@ -125,13 +110,7 @@ module OpenAI sig do override.returns( { - input: - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ), + input: OpenAI::EmbeddingCreateParams::Input::Variants, model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, encoding_format: diff --git a/rbi/openai/models/eval_create_params.rbi b/rbi/openai/models/eval_create_params.rbi index a1ba68fb..7131576a 100644 --- a/rbi/openai/models/eval_create_params.rbi +++ b/rbi/openai/models/eval_create_params.rbi @@ -11,7 +11,8 @@ module OpenAI T.any(OpenAI::EvalCreateParams, OpenAI::Internal::AnyHash) end - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. sig do returns( T.any( @@ -23,7 +24,10 @@ module OpenAI end attr_accessor :data_source_config - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). sig do returns( T::Array[ @@ -79,9 +83,13 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # The configuration for the data source used for the evaluation runs. 
+ # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. data_source_config:, - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). testing_criteria:, # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -124,7 +132,8 @@ module OpenAI def to_hash end - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. module DataSourceConfig extend OpenAI::Internal::Type::Union @@ -252,7 +261,7 @@ module OpenAI ) end - # The type of data source. Always `stored-completions`. + # The type of data source. Always `stored_completions`. sig { returns(Symbol) } attr_accessor :type @@ -272,8 +281,8 @@ module OpenAI def self.new( # Metadata filters for the stored completions data source. metadata: nil, - # The type of data source. Always `stored-completions`. - type: :"stored-completions" + # The type of data source. Always `stored_completions`. + type: :stored_completions ) end @@ -321,13 +330,13 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. 
sig do returns( T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ] ) @@ -362,7 +371,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage::OrHash, - OpenAI::EvalItem::OrHash + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::OrHash ) ], labels: T::Array[String], @@ -374,7 +383,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. input:, # The labels to classify to each item in the evaluation. labels:, @@ -396,7 +405,7 @@ module OpenAI T::Array[ T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) ], labels: T::Array[String], @@ -411,7 +420,7 @@ module OpenAI end # A chat message that makes up the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. module Input extend OpenAI::Internal::Type::Union @@ -419,7 +428,7 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, - OpenAI::EvalItem + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem ) end @@ -456,6 +465,238 @@ module OpenAI end end + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. 
+ sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. 
+ type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ), + role: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::OrSymbol, + type: + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/eval_create_response.rbi b/rbi/openai/models/eval_create_response.rbi index a3433afc..6513b6f7 100644 --- a/rbi/openai/models/eval_create_response.rbi +++ b/rbi/openai/models/eval_create_response.rbi @@ -18,13 +18,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. 
sig do - returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - ) + returns(OpenAI::Models::EvalCreateResponse::DataSourceConfig::Variants) end attr_accessor :data_source_config @@ -49,13 +43,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalCreateResponse::TestingCriterion::Variants ] ) end @@ -66,7 +54,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, @@ -74,7 +62,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -82,8 +70,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -121,23 +109,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, 
- OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalCreateResponse::TestingCriterion::Variants ] } ) @@ -153,11 +131,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. 
The + # schema returned by this data source config is used to define what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. + type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do + override.returns( + T::Array[ @@ -177,8 +223,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel diff --git a/rbi/openai/models/eval_item.rbi b/rbi/openai/models/eval_item.rbi deleted file mode 100644 index 841d9960..00000000 --- a/rbi/openai/models/eval_item.rbi +++ /dev/null @@ -1,161 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class EvalItem < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias { T.any(OpenAI::EvalItem, OpenAI::Internal::AnyHash) } - - #
Text inputs to the model - can contain template strings. - sig do - returns( - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalItem::Content::OutputText - ) - ) - end - attr_accessor :content - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - sig { returns(OpenAI::EvalItem::Role::OrSymbol) } - attr_accessor :role - - # The type of the message input. Always `message`. - sig { returns(T.nilable(OpenAI::EvalItem::Type::OrSymbol)) } - attr_reader :type - - sig { params(type: OpenAI::EvalItem::Type::OrSymbol).void } - attr_writer :type - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - sig do - params( - content: - T.any( - String, - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::EvalItem::Content::OutputText::OrHash - ), - role: OpenAI::EvalItem::Role::OrSymbol, - type: OpenAI::EvalItem::Type::OrSymbol - ).returns(T.attached_class) - end - def self.new( - # Text inputs to the model - can contain template strings. - content:, - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - role:, - # The type of the message input. Always `message`. - type: nil - ) - end - - sig do - override.returns( - { - content: - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalItem::Content::OutputText - ), - role: OpenAI::EvalItem::Role::OrSymbol, - type: OpenAI::EvalItem::Type::OrSymbol - } - ) - end - def to_hash - end - - # Text inputs to the model - can contain template strings. 
- module Content - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - String, - OpenAI::Responses::ResponseInputText, - OpenAI::EvalItem::Content::OutputText - ) - end - - class OutputText < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::EvalItem::Content::OutputText, - OpenAI::Internal::AnyHash - ) - end - - # The text output from the model. - sig { returns(String) } - attr_accessor :text - - # The type of the output text. Always `output_text`. - sig { returns(Symbol) } - attr_accessor :type - - # A text output from the model. - sig { params(text: String, type: Symbol).returns(T.attached_class) } - def self.new( - # The text output from the model. - text:, - # The type of the output text. Always `output_text`. - type: :output_text - ) - end - - sig { override.returns({ text: String, type: Symbol }) } - def to_hash - end - end - - sig { override.returns(T::Array[OpenAI::EvalItem::Content::Variants]) } - def self.variants - end - end - - # The role of the message input. One of `user`, `assistant`, `system`, or - # `developer`. - module Role - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::EvalItem::Role) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - USER = T.let(:user, OpenAI::EvalItem::Role::TaggedSymbol) - ASSISTANT = T.let(:assistant, OpenAI::EvalItem::Role::TaggedSymbol) - SYSTEM = T.let(:system, OpenAI::EvalItem::Role::TaggedSymbol) - DEVELOPER = T.let(:developer, OpenAI::EvalItem::Role::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::EvalItem::Role::TaggedSymbol]) } - def self.values - end - end - - # The type of the message input. Always `message`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::EvalItem::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String) } - - MESSAGE = T.let(:message, OpenAI::EvalItem::Type::TaggedSymbol) - - sig { override.returns(T::Array[OpenAI::EvalItem::Type::TaggedSymbol]) } - def self.values - end - end - end - end -end diff --git a/rbi/openai/models/eval_list_response.rbi b/rbi/openai/models/eval_list_response.rbi index c8e63e69..1158ef42 100644 --- a/rbi/openai/models/eval_list_response.rbi +++ b/rbi/openai/models/eval_list_response.rbi @@ -18,13 +18,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. sig do - returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - ) + returns(OpenAI::Models::EvalListResponse::DataSourceConfig::Variants) end attr_accessor :data_source_config @@ -48,15 +42,7 @@ module OpenAI # A list of testing criteria. 
sig do returns( - T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel - ) - ] + T::Array[OpenAI::Models::EvalListResponse::TestingCriterion::Variants] ) end attr_accessor :testing_criteria @@ -66,7 +52,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, @@ -74,7 +60,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -82,8 +68,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -121,23 +107,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalListResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - 
OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalListResponse::TestingCriterion::Variants ] } ) @@ -153,11 +129,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalListResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. 
+ sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. + type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -177,8 +221,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel diff --git a/rbi/openai/models/eval_logs_data_source_config.rbi b/rbi/openai/models/eval_logs_data_source_config.rbi deleted file mode 100644 index bbaac918..00000000 --- a/rbi/openai/models/eval_logs_data_source_config.rbi +++ /dev/null @@ -1,70 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any(OpenAI::EvalLogsDataSourceConfig, OpenAI::Internal::AnyHash) - end - - # The json schema for the run data source items. 
Learn how to build JSON schemas - # [here](https://json-schema.org/). - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :schema - - # The type of data source. Always `logs`. - sig { returns(Symbol) } - attr_accessor :type - - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - sig { returns(T.nilable(T::Hash[Symbol, String])) } - attr_accessor :metadata - - # A LogsDataSourceConfig which specifies the metadata property of your logs query. - # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The - # schema returned by this data source config is used to defined what variables are - # available in your evals. `item` and `sample` are both defined when using this - # data source config. - sig do - params( - schema: T::Hash[Symbol, T.anything], - metadata: T.nilable(T::Hash[Symbol, String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The json schema for the run data source items. Learn how to build JSON schemas - # [here](https://json-schema.org/). - schema:, - # Set of 16 key-value pairs that can be attached to an object. This can be useful - # for storing additional information about the object in a structured format, and - # querying for objects via API or the dashboard. - # - # Keys are strings with a maximum length of 64 characters. Values are strings with - # a maximum length of 512 characters. - metadata: nil, - # The type of data source. Always `logs`. 
- type: :logs - ) - end - - sig do - override.returns( - { - schema: T::Hash[Symbol, T.anything], - type: Symbol, - metadata: T.nilable(T::Hash[Symbol, String]) - } - ) - end - def to_hash - end - end - end -end diff --git a/rbi/openai/models/eval_retrieve_response.rbi b/rbi/openai/models/eval_retrieve_response.rbi index baacb528..66b5570d 100644 --- a/rbi/openai/models/eval_retrieve_response.rbi +++ b/rbi/openai/models/eval_retrieve_response.rbi @@ -19,11 +19,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. sig do returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Variants ) end attr_accessor :data_source_config @@ -49,13 +45,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Variants ] ) end @@ -66,7 +56,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, @@ -74,7 +64,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -82,8 +72,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - 
OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -121,23 +111,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalRetrieveResponse::TestingCriterion::Variants ] } ) @@ -153,11 +133,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. 
This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. 
+ type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -177,8 +225,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel diff --git a/rbi/openai/models/eval_stored_completions_data_source_config.rbi b/rbi/openai/models/eval_stored_completions_data_source_config.rbi index ac338ea9..013b469d 100644 --- a/rbi/openai/models/eval_stored_completions_data_source_config.rbi +++ b/rbi/openai/models/eval_stored_completions_data_source_config.rbi @@ -16,7 +16,7 @@ module OpenAI sig { returns(T::Hash[Symbol, T.anything]) } attr_accessor :schema - # The type of data source. Always `stored-completions`. + # The type of data source. Always `stored_completions`. sig { returns(Symbol) } attr_accessor :type @@ -48,8 +48,8 @@ module OpenAI # Keys are strings with a maximum length of 64 characters. Values are strings with # a maximum length of 512 characters. metadata: nil, - # The type of data source. Always `stored-completions`. - type: :"stored-completions" + # The type of data source. Always `stored_completions`. + type: :stored_completions ) end diff --git a/rbi/openai/models/eval_update_response.rbi b/rbi/openai/models/eval_update_response.rbi index f82939b7..07d46351 100644 --- a/rbi/openai/models/eval_update_response.rbi +++ b/rbi/openai/models/eval_update_response.rbi @@ -18,13 +18,7 @@ module OpenAI # Configuration of data sources used in runs of the evaluation. 
sig do - returns( - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, - OpenAI::EvalStoredCompletionsDataSourceConfig - ) - ) + returns(OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Variants) end attr_accessor :data_source_config @@ -49,13 +43,7 @@ module OpenAI sig do returns( T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Variants ] ) end @@ -66,7 +54,7 @@ module OpenAI # # - Improve the quality of my chatbot # - See how well my chatbot handles customer support - # - Check if o3-mini is better at my usecase than gpt-4o + # - Check if o4-mini is better at my usecase than gpt-4o sig do params( id: String, @@ -74,7 +62,7 @@ module OpenAI data_source_config: T.any( OpenAI::EvalCustomDataSourceConfig::OrHash, - OpenAI::EvalLogsDataSourceConfig::OrHash, + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs::OrHash, OpenAI::EvalStoredCompletionsDataSourceConfig::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), @@ -82,8 +70,8 @@ module OpenAI testing_criteria: T::Array[ T.any( - OpenAI::Graders::LabelModelGrader::OrHash, - OpenAI::Graders::StringCheckGrader::OrHash, + OpenAI::Models::Graders::LabelModelGrader::OrHash, + OpenAI::Models::Graders::StringCheckGrader::OrHash, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity::OrHash, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython::OrHash, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel::OrHash @@ -121,23 +109,13 @@ module OpenAI id: String, created_at: Integer, data_source_config: - T.any( - OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, 
- OpenAI::EvalStoredCompletionsDataSourceConfig - ), + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Variants, metadata: T.nilable(T::Hash[Symbol, String]), name: String, object: Symbol, testing_criteria: T::Array[ - T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, - OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel - ) + OpenAI::Models::EvalUpdateResponse::TestingCriterion::Variants ] } ) @@ -153,11 +131,79 @@ module OpenAI T.type_alias do T.any( OpenAI::EvalCustomDataSourceConfig, - OpenAI::EvalLogsDataSourceConfig, + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, OpenAI::EvalStoredCompletionsDataSourceConfig ) end + class Logs < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs, + OpenAI::Internal::AnyHash + ) + end + + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :schema + + # The type of data source. Always `logs`. + sig { returns(Symbol) } + attr_accessor :type + + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :metadata + + # A LogsDataSourceConfig which specifies the metadata property of your logs query. + # This is usually metadata like `usecase=chatbot` or `prompt-version=v2`, etc. 
The + # schema returned by this data source config is used to defined what variables are + # available in your evals. `item` and `sample` are both defined when using this + # data source config. + sig do + params( + schema: T::Hash[Symbol, T.anything], + metadata: T.nilable(T::Hash[Symbol, String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The json schema for the run data source items. Learn how to build JSON schemas + # [here](https://json-schema.org/). + schema:, + # Set of 16 key-value pairs that can be attached to an object. This can be useful + # for storing additional information about the object in a structured format, and + # querying for objects via API or the dashboard. + # + # Keys are strings with a maximum length of 64 characters. Values are strings with + # a maximum length of 512 characters. + metadata: nil, + # The type of data source. Always `logs`. + type: :logs + ) + end + + sig do + override.returns( + { + schema: T::Hash[Symbol, T.anything], + type: Symbol, + metadata: T.nilable(T::Hash[Symbol, String]) + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[ @@ -177,8 +223,8 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Graders::LabelModelGrader, - OpenAI::Graders::StringCheckGrader, + OpenAI::Models::Graders::LabelModelGrader, + OpenAI::Models::Graders::StringCheckGrader, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython, OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel diff --git a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi index 5ab2adf7..3b8ebdb7 100644 --- a/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -12,12 +12,12 @@ module OpenAI ) end - # A 
StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. sig do returns( T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) ) @@ -32,6 +32,10 @@ module OpenAI end attr_accessor :type + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. sig do returns( T.nilable( @@ -84,8 +88,8 @@ module OpenAI params( source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource::OrHash, - OpenAI::Evals::EvalJSONLFileIDSource::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions::OrHash ), type: @@ -101,10 +105,14 @@ module OpenAI ).returns(T.attached_class) end def self.new( - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. source:, # The type of run data source. Always `completions`. type:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. input_messages: nil, # The name of the model to use for generating completions (e.g. "o3-mini"). 
model: nil, @@ -117,8 +125,8 @@ module OpenAI { source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ), type: @@ -137,19 +145,144 @@ module OpenAI def to_hash end - # A StoredCompletionsRunDataSource configuration describing a set of filters + # Determines what populates the `item` namespace in this run's data source. module Source extend OpenAI::Internal::Type::Union Variants = T.type_alias do T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions ) end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + class StoredCompletions < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do @@ -277,6 +410,10 @@ module OpenAI end end + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. 
Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. module InputMessages extend OpenAI::Internal::Type::Union @@ -298,11 +435,14 @@ module OpenAI end # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. sig do returns( T::Array[ - T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + ) ] ) end @@ -318,7 +458,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::EvalItem::OrHash + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::OrHash ) ], type: Symbol @@ -326,7 +466,7 @@ module OpenAI end def self.new( # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. + # references to the `item` namespace, ie {{item.name}}. template:, # The type of input messages. Always `template`. 
type: :template @@ -340,7 +480,7 @@ module OpenAI T::Array[ T.any( OpenAI::Responses::EasyInputMessage, - OpenAI::EvalItem + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message ) ], type: Symbol @@ -360,9 +500,246 @@ module OpenAI Variants = T.type_alias do - T.any(OpenAI::Responses::EasyInputMessage, OpenAI::EvalItem) + T.any( + OpenAI::Responses::EasyInputMessage, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message + ) + end + + class Message < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. 
Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText::OrHash + ), + role: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) end + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ), + role: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::OrSymbol, + type: + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. 
+ module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + sig do override.returns( T::Array[ @@ -384,7 +761,7 @@ module OpenAI ) end - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. 
Ie, "item.input_trajectory" sig { returns(String) } attr_accessor :item_reference @@ -398,7 +775,7 @@ module OpenAI ) end def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" + # A reference to a variable in the `item` namespace. Ie, "item.input_trajectory" item_reference:, # The type of input messages. Always `item_reference`. type: :item_reference diff --git a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi index af53b5ed..d72d28b8 100644 --- a/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi +++ b/rbi/openai/models/evals/create_eval_jsonl_run_data_source.rbi @@ -12,11 +12,12 @@ module OpenAI ) end + # Determines what populates the `item` namespace in the data source. sig do returns( T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ) ) end @@ -32,13 +33,14 @@ module OpenAI params( source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource::OrHash, - OpenAI::Evals::EvalJSONLFileIDSource::OrHash + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID::OrHash ), type: Symbol ).returns(T.attached_class) end def self.new( + # Determines what populates the `item` namespace in the data source. source:, # The type of data source. Always `jsonl`. type: :jsonl @@ -50,8 +52,8 @@ module OpenAI { source: T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ), type: Symbol } @@ -60,17 +62,143 @@ module OpenAI def to_hash end + # Determines what populates the `item` namespace in the data source. 
module Source extend OpenAI::Internal::Type::Union Variants = T.type_alias do T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID ) end + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig { params(id: String, type: Symbol).returns(T.attached_class) } + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. 
+ type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + sig do override.returns( T::Array[ diff --git a/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi b/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi deleted file mode 100644 index 1567b220..00000000 --- a/rbi/openai/models/evals/create_eval_responses_run_data_source.rbi +++ /dev/null @@ -1,591 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Evals - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource, - OpenAI::Internal::AnyHash - ) - end - - # A EvalResponsesSource object describing a run data source configuration. - sig do - returns( - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ) - ) - end - attr_accessor :source - - # The type of run data source. Always `responses`. - sig do - returns( - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol - ) - end - attr_accessor :type - - sig do - returns( - T.nilable( - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - ) - ) - end - attr_reader :input_messages - - sig do - params( - input_messages: - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash - ) - ).void - end - attr_writer :input_messages - - # The name of the model to use for generating completions (e.g. "o3-mini"). 
- sig { returns(T.nilable(String)) } - attr_reader :model - - sig { params(model: String).void } - attr_writer :model - - sig do - returns( - T.nilable( - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - ) - ) - end - attr_reader :sampling_params - - sig do - params( - sampling_params: - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams::OrHash - ).void - end - attr_writer :sampling_params - - # A ResponsesRunDataSource object describing a model sampling configuration. - sig do - params( - source: - T.any( - OpenAI::Evals::EvalJSONLFileContentSource::OrHash, - OpenAI::Evals::EvalJSONLFileIDSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses::OrHash - ), - type: - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash - ), - model: String, - sampling_params: - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams::OrHash - ).returns(T.attached_class) - end - def self.new( - # A EvalResponsesSource object describing a run data source configuration. - source:, - # The type of run data source. Always `responses`. - type:, - input_messages: nil, - # The name of the model to use for generating completions (e.g. "o3-mini"). 
- model: nil, - sampling_params: nil - ) - end - - sig do - override.returns( - { - source: - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ), - type: - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::OrSymbol, - input_messages: - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ), - model: String, - sampling_params: - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - } - ) - end - def to_hash - end - - # A EvalResponsesSource object describing a run data source configuration. - module Source - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - ) - end - - class Responses < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses, - OpenAI::Internal::AnyHash - ) - end - - # The type of run data source. Always `responses`. - sig { returns(Symbol) } - attr_accessor :type - - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_after - - # Only include items created before this timestamp (inclusive). This is a query - # parameter used to select responses. - sig { returns(T.nilable(Integer)) } - attr_accessor :created_before - - # Whether the response has tool calls. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T::Boolean)) } - attr_accessor :has_tool_calls - - # Optional string to search the 'instructions' field. This is a query parameter - # used to select responses. 
- sig { returns(T.nilable(String)) } - attr_accessor :instructions_search - - # Metadata filter for the responses. This is a query parameter used to select - # responses. - sig { returns(T.nilable(T.anything)) } - attr_accessor :metadata - - # The name of the model to find responses for. This is a query parameter used to - # select responses. - sig { returns(T.nilable(String)) } - attr_accessor :model - - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } - attr_accessor :reasoning_effort - - # Sampling temperature. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :temperature - - # List of tool names. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :tools - - # Nucleus sampling parameter. This is a query parameter used to select responses. - sig { returns(T.nilable(Float)) } - attr_accessor :top_p - - # List of user identifiers. This is a query parameter used to select responses. - sig { returns(T.nilable(T::Array[String])) } - attr_accessor :users - - # A EvalResponsesSource object describing a run data source configuration. - sig do - params( - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - tools: T.nilable(T::Array[String]), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]), - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # Only include items created after this timestamp (inclusive). This is a query - # parameter used to select responses. - created_after: nil, - # Only include items created before this timestamp (inclusive). 
This is a query - # parameter used to select responses. - created_before: nil, - # Whether the response has tool calls. This is a query parameter used to select - # responses. - has_tool_calls: nil, - # Optional string to search the 'instructions' field. This is a query parameter - # used to select responses. - instructions_search: nil, - # Metadata filter for the responses. This is a query parameter used to select - # responses. - metadata: nil, - # The name of the model to find responses for. This is a query parameter used to - # select responses. - model: nil, - # Optional reasoning effort parameter. This is a query parameter used to select - # responses. - reasoning_effort: nil, - # Sampling temperature. This is a query parameter used to select responses. - temperature: nil, - # List of tool names. This is a query parameter used to select responses. - tools: nil, - # Nucleus sampling parameter. This is a query parameter used to select responses. - top_p: nil, - # List of user identifiers. This is a query parameter used to select responses. - users: nil, - # The type of run data source. Always `responses`. - type: :responses - ) - end - - sig do - override.returns( - { - type: Symbol, - created_after: T.nilable(Integer), - created_before: T.nilable(Integer), - has_tool_calls: T.nilable(T::Boolean), - instructions_search: T.nilable(String), - metadata: T.nilable(T.anything), - model: T.nilable(String), - reasoning_effort: - T.nilable(OpenAI::ReasoningEffort::OrSymbol), - temperature: T.nilable(Float), - tools: T.nilable(T::Array[String]), - top_p: T.nilable(Float), - users: T.nilable(T::Array[String]) - } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Variants - ] - ) - end - def self.variants - end - end - - # The type of run data source. Always `responses`. 
- module Type - extend OpenAI::Internal::Type::Enum - - TaggedSymbol = - T.type_alias do - T.all( - Symbol, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type - ) - end - OrSymbol = T.type_alias { T.any(Symbol, String) } - - RESPONSES = - T.let( - :responses, - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::TaggedSymbol - ) - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::Type::TaggedSymbol - ] - ) - end - def self.values - end - end - - module InputMessages - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - ) - end - - class Template < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template, - OpenAI::Internal::AnyHash - ) - end - - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - sig do - returns( - T::Array[ - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - ] - ) - end - attr_accessor :template - - # The type of input messages. Always `template`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - template: - T::Array[ - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage::OrHash, - OpenAI::EvalItem::OrHash - ) - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A list of chat messages forming the prompt or context. May include variable - # references to the "item" namespace, ie {{item.name}}. - template:, - # The type of input messages. Always `template`. 
- type: :template - ) - end - - sig do - override.returns( - { - template: - T::Array[ - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - ], - type: Symbol - } - ) - end - def to_hash - end - - # A message input to the model with a role indicating instruction following - # hierarchy. Instructions given with the `developer` or `system` role take - # precedence over instructions given with the `user` role. Messages with the - # `assistant` role are presumed to have been generated by the model in previous - # interactions. - module Template - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::EvalItem - ) - end - - class ChatMessage < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, - OpenAI::Internal::AnyHash - ) - end - - # The content of the message. - sig { returns(String) } - attr_accessor :content - - # The role of the message (e.g. "system", "assistant", "user"). - sig { returns(String) } - attr_accessor :role - - sig do - params(content: String, role: String).returns( - T.attached_class - ) - end - def self.new( - # The content of the message. - content:, - # The role of the message (e.g. "system", "assistant", "user"). 
- role: - ) - end - - sig { override.returns({ content: String, role: String }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::Variants - ] - ) - end - def self.variants - end - end - end - - class ItemReference < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, - OpenAI::Internal::AnyHash - ) - end - - # A reference to a variable in the "item" namespace. Ie, "item.name" - sig { returns(String) } - attr_accessor :item_reference - - # The type of input messages. Always `item_reference`. - sig { returns(Symbol) } - attr_accessor :type - - sig do - params(item_reference: String, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # A reference to a variable in the "item" namespace. Ie, "item.name" - item_reference:, - # The type of input messages. Always `item_reference`. - type: :item_reference - ) - end - - sig { override.returns({ item_reference: String, type: Symbol }) } - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Variants - ] - ) - end - def self.variants - end - end - - class SamplingParams < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams, - OpenAI::Internal::AnyHash - ) - end - - # The maximum number of tokens in the generated output. - sig { returns(T.nilable(Integer)) } - attr_reader :max_completion_tokens - - sig { params(max_completion_tokens: Integer).void } - attr_writer :max_completion_tokens - - # A seed value to initialize the randomness, during sampling. - sig { returns(T.nilable(Integer)) } - attr_reader :seed - - sig { params(seed: Integer).void } - attr_writer :seed - - # A higher temperature increases randomness in the outputs. 
- sig { returns(T.nilable(Float)) } - attr_reader :temperature - - sig { params(temperature: Float).void } - attr_writer :temperature - - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - sig { returns(T.nilable(Float)) } - attr_reader :top_p - - sig { params(top_p: Float).void } - attr_writer :top_p - - sig do - params( - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - ).returns(T.attached_class) - end - def self.new( - # The maximum number of tokens in the generated output. - max_completion_tokens: nil, - # A seed value to initialize the randomness, during sampling. - seed: nil, - # A higher temperature increases randomness in the outputs. - temperature: nil, - # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. - top_p: nil - ) - end - - sig do - override.returns( - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - ) - end - def to_hash - end - end - end - end - end -end diff --git a/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi b/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi deleted file mode 100644 index 741fced0..00000000 --- a/rbi/openai/models/evals/eval_jsonl_file_content_source.rbi +++ /dev/null @@ -1,97 +0,0 @@ -# typed: strong - -module OpenAI - module Models - EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource - - module Evals - class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource, - OpenAI::Internal::AnyHash - ) - end - - # The content of the jsonl file. - sig do - returns(T::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content]) - end - attr_accessor :content - - # The type of jsonl source. Always `file_content`. 
- sig { returns(Symbol) } - attr_accessor :type - - sig do - params( - content: - T::Array[ - OpenAI::Evals::EvalJSONLFileContentSource::Content::OrHash - ], - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The content of the jsonl file. - content:, - # The type of jsonl source. Always `file_content`. - type: :file_content - ) - end - - sig do - override.returns( - { - content: - T::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], - type: Symbol - } - ) - end - def to_hash - end - - class Content < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileContentSource::Content, - OpenAI::Internal::AnyHash - ) - end - - sig { returns(T::Hash[Symbol, T.anything]) } - attr_accessor :item - - sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } - attr_reader :sample - - sig { params(sample: T::Hash[Symbol, T.anything]).void } - attr_writer :sample - - sig do - params( - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - ).returns(T.attached_class) - end - def self.new(item:, sample: nil) - end - - sig do - override.returns( - { - item: T::Hash[Symbol, T.anything], - sample: T::Hash[Symbol, T.anything] - } - ) - end - def to_hash - end - end - end - end - end -end diff --git a/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi b/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi deleted file mode 100644 index c9da7ca8..00000000 --- a/rbi/openai/models/evals/eval_jsonl_file_id_source.rbi +++ /dev/null @@ -1,40 +0,0 @@ -# typed: strong - -module OpenAI - module Models - EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource - - module Evals - class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Evals::EvalJSONLFileIDSource, - OpenAI::Internal::AnyHash - ) - end - - # The identifier of the file. - sig { returns(String) } - attr_accessor :id - - # The type of jsonl source. Always `file_id`. 
- sig { returns(Symbol) } - attr_accessor :type - - sig { params(id: String, type: Symbol).returns(T.attached_class) } - def self.new( - # The identifier of the file. - id:, - # The type of jsonl source. Always `file_id`. - type: :file_id - ) - end - - sig { override.returns({ id: String, type: Symbol }) } - def to_hash - end - end - end - end -end diff --git a/rbi/openai/models/evals/run_cancel_response.rbi b/rbi/openai/models/evals/run_cancel_response.rbi index a305b087..d6f3d390 100644 --- a/rbi/openai/models/evals/run_cancel_response.rbi +++ b/rbi/openai/models/evals/run_cancel_response.rbi @@ -23,11 +23,7 @@ module OpenAI # Information about the run's data source. sig do returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + OpenAI::Models::Evals::RunCancelResponse::DataSource::Variants ) end attr_accessor :data_source @@ -111,7 +107,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -176,11 +172,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ), + OpenAI::Models::Evals::RunCancelResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -214,9 +206,897 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses + ) + end + + class Responses 
< OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses, + OpenAI::Internal::AnyHash + ) + end + + # Determines what populates the `item` namespace in this run's data source. + sig do + returns( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Variants + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Variants + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Determines what populates the `item` namespace in this run's data source. + source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Variants, + type: Symbol, + input_messages: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Variants, + model: String, + sampling_params: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # Determines what populates the `item` namespace in this run's data source. 
+ module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. 
+ sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). 
+ sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, + role: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. 
+ sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end 
+ + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the `item` namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the `item` namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end end + end sig do override.returns( diff --git a/rbi/openai/models/evals/run_create_params.rbi b/rbi/openai/models/evals/run_create_params.rbi index 878fc731..eba90132 100644 --- a/rbi/openai/models/evals/run_create_params.rbi +++ b/rbi/openai/models/evals/run_create_params.rbi @@ -18,7 +18,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ) ) end @@ -46,7 +46,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -76,7 +76,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -96,10 +96,960 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource ) end + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource, + OpenAI::Internal::AnyHash + ) + end + + # Determines what populates the `item` namespace in this run's data source. 
+ sig do + returns( + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig do + returns( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol + ) + end + attr_accessor :type + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + sig do + returns( + T.nilable( + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). 
+ sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. + sig do + params( + source: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses::OrHash + ), + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams::OrHash + ).returns(T.attached_class) + end + def self.new( + # Determines what populates the `item` namespace in this run's data source. + source:, + # The type of run data source. Always `responses`. + type:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). 
+ model: nil, + sampling_params: nil + ) + end + + sig do + override.returns( + { + source: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ), + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::OrSymbol, + input_messages: + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ), + model: String, + sampling_params: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + ) + end + def to_hash + end + + # Determines what populates the `item` namespace in this run's data source. + module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. + type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. 
+ id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig { returns(T.nilable(OpenAI::ReasoningEffort::OrSymbol)) } + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. 
+ sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. + sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Variants + ] + ) + end + def self.variants + end + end + + # The type of run data source. Always `responses`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + RESPONSES = + T.let( + :responses, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. 
+ module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. 
+ type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). + sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). 
+ role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ), + role: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. 
+ module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. 
+ module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the `item` namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the `item` namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil + ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end + end + end + sig do override.returns( T::Array[OpenAI::Evals::RunCreateParams::DataSource::Variants] diff --git a/rbi/openai/models/evals/run_create_response.rbi b/rbi/openai/models/evals/run_create_response.rbi index ffa092b4..32cae98d 100644 --- a/rbi/openai/models/evals/run_create_response.rbi +++ b/rbi/openai/models/evals/run_create_response.rbi @@ -23,11 +23,7 @@ module OpenAI # Information about the run's data source. sig do returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + OpenAI::Models::Evals::RunCreateResponse::DataSource::Variants ) end attr_accessor :data_source @@ -111,7 +107,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -176,11 +172,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ), + OpenAI::Models::Evals::RunCreateResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -214,9 +206,897 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses + ) + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + 
T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses, + OpenAI::Internal::AnyHash + ) + end + + # Determines what populates the `item` namespace in this run's data source. + sig do + returns( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Variants + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Variants + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Determines what populates the `item` namespace in this run's data source. + source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Variants, + type: Symbol, + input_messages: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Variants, + model: String, + sampling_params: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # Determines what populates the `item` namespace in this run's data source. 
+ module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. 
+ sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). 
+ sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, + role: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. 
+ sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end 
+ + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the `item` namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the `item` namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end end + end sig do override.returns( diff --git a/rbi/openai/models/evals/run_list_response.rbi b/rbi/openai/models/evals/run_list_response.rbi index f398957e..272e7bb7 100644 --- a/rbi/openai/models/evals/run_list_response.rbi +++ b/rbi/openai/models/evals/run_list_response.rbi @@ -22,13 +22,7 @@ module OpenAI # Information about the run's data source. sig do - returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) - ) + returns(OpenAI::Models::Evals::RunListResponse::DataSource::Variants) end attr_accessor :data_source @@ -111,7 +105,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -176,11 +170,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ), + OpenAI::Models::Evals::RunListResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -212,9 +202,897 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunListResponse::DataSource::Responses + ) + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses, + 
OpenAI::Internal::AnyHash + ) + end + + # Determines what populates the `item` namespace in this run's data source. + sig do + returns( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Variants + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Variants + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Determines what populates the `item` namespace in this run's data source. + source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Variants, + type: Symbol, + input_messages: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Variants, + model: String, + sampling_params: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # Determines what populates the `item` namespace in this run's data source. 
+ module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. 
+ sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). 
+ sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, + role: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. 
+ sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The 
type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the `item` namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the `item` namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end end + end sig do override.returns( diff --git a/rbi/openai/models/evals/run_retrieve_response.rbi b/rbi/openai/models/evals/run_retrieve_response.rbi index 81fb22ff..a51ecb78 100644 --- a/rbi/openai/models/evals/run_retrieve_response.rbi +++ b/rbi/openai/models/evals/run_retrieve_response.rbi @@ -23,11 +23,7 @@ module OpenAI # Information about the run's data source. sig do returns( - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ) + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Variants ) end attr_accessor :data_source @@ -113,7 +109,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::OrHash ), error: OpenAI::Evals::EvalAPIError::OrHash, eval_id: String, @@ -178,11 +174,7 @@ module OpenAI id: String, created_at: Integer, data_source: - T.any( - OpenAI::Evals::CreateEvalJSONLRunDataSource, - OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource - ), + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Variants, error: OpenAI::Evals::EvalAPIError, eval_id: String, metadata: T.nilable(T::Hash[Symbol, String]), @@ -216,9 +208,897 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource, OpenAI::Evals::CreateEvalCompletionsRunDataSource, - OpenAI::Evals::CreateEvalResponsesRunDataSource + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses + ) + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + 
OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses, + OpenAI::Internal::AnyHash + ) + end + + # Determines what populates the `item` namespace in this run's data source. + sig do + returns( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Variants + ) + end + attr_accessor :source + + # The type of run data source. Always `responses`. + sig { returns(Symbol) } + attr_accessor :type + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Variants + ) + ) + end + attr_reader :input_messages + + sig do + params( + input_messages: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ) + ).void + end + attr_writer :input_messages + + # The name of the model to use for generating completions (e.g. "o3-mini"). + sig { returns(T.nilable(String)) } + attr_reader :model + + sig { params(model: String).void } + attr_writer :model + + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + ) + ) + end + attr_reader :sampling_params + + sig do + params( + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::OrHash + ).void + end + attr_writer :sampling_params + + # A ResponsesRunDataSource object describing a model sampling configuration. 
+ sig do + params( + source: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses::OrHash + ), + input_messages: + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference::OrHash + ), + model: String, + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams::OrHash, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Determines what populates the `item` namespace in this run's data source. + source:, + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + input_messages: nil, + # The name of the model to use for generating completions (e.g. "o3-mini"). + model: nil, + sampling_params: nil, + # The type of run data source. Always `responses`. + type: :responses + ) + end + + sig do + override.returns( + { + source: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Variants, + type: Symbol, + input_messages: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Variants, + model: String, + sampling_params: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + } + ) + end + def to_hash + end + + # Determines what populates the `item` namespace in this run's data source. 
+ module Source + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses + ) + end + + class FileContent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent, + OpenAI::Internal::AnyHash + ) + end + + # The content of the jsonl file. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content + ] + ) + end + attr_accessor :content + + # The type of jsonl source. Always `file_content`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + content: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content::OrHash + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The content of the jsonl file. + content:, + # The type of jsonl source. Always `file_content`. 
+ type: :file_content + ) + end + + sig do + override.returns( + { + content: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content + ], + type: Symbol + } + ) + end + def to_hash + end + + class Content < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content, + OpenAI::Internal::AnyHash + ) + end + + sig { returns(T::Hash[Symbol, T.anything]) } + attr_accessor :item + + sig { returns(T.nilable(T::Hash[Symbol, T.anything])) } + attr_reader :sample + + sig { params(sample: T::Hash[Symbol, T.anything]).void } + attr_writer :sample + + sig do + params( + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + ).returns(T.attached_class) + end + def self.new(item:, sample: nil) + end + + sig do + override.returns( + { + item: T::Hash[Symbol, T.anything], + sample: T::Hash[Symbol, T.anything] + } + ) + end + def to_hash + end + end + end + + class FileID < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID, + OpenAI::Internal::AnyHash + ) + end + + # The identifier of the file. + sig { returns(String) } + attr_accessor :id + + # The type of jsonl source. Always `file_id`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(id: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The identifier of the file. + id:, + # The type of jsonl source. Always `file_id`. + type: :file_id + ) + end + + sig { override.returns({ id: String, type: Symbol }) } + def to_hash + end + end + + class Responses < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses, + OpenAI::Internal::AnyHash + ) + end + + # The type of run data source. Always `responses`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_after + + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + sig { returns(T.nilable(Integer)) } + attr_accessor :created_before + + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + sig { returns(T.nilable(String)) } + attr_accessor :instructions_search + + # Metadata filter for the responses. This is a query parameter used to select + # responses. + sig { returns(T.nilable(T.anything)) } + attr_accessor :metadata + + # The name of the model to find responses for. This is a query parameter used to + # select responses. + sig { returns(T.nilable(String)) } + attr_accessor :model + + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + sig do + returns(T.nilable(OpenAI::ReasoningEffort::TaggedSymbol)) + end + attr_accessor :reasoning_effort + + # Sampling temperature. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :temperature + + # List of tool names. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :tools + + # Nucleus sampling parameter. This is a query parameter used to select responses. + sig { returns(T.nilable(Float)) } + attr_accessor :top_p + + # List of user identifiers. This is a query parameter used to select responses. + sig { returns(T.nilable(T::Array[String])) } + attr_accessor :users + + # A EvalResponsesSource object describing a run data source configuration. 
+ sig do + params( + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::OrSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Only include items created after this timestamp (inclusive). This is a query + # parameter used to select responses. + created_after: nil, + # Only include items created before this timestamp (inclusive). This is a query + # parameter used to select responses. + created_before: nil, + # Optional string to search the 'instructions' field. This is a query parameter + # used to select responses. + instructions_search: nil, + # Metadata filter for the responses. This is a query parameter used to select + # responses. + metadata: nil, + # The name of the model to find responses for. This is a query parameter used to + # select responses. + model: nil, + # Optional reasoning effort parameter. This is a query parameter used to select + # responses. + reasoning_effort: nil, + # Sampling temperature. This is a query parameter used to select responses. + temperature: nil, + # List of tool names. This is a query parameter used to select responses. + tools: nil, + # Nucleus sampling parameter. This is a query parameter used to select responses. + top_p: nil, + # List of user identifiers. This is a query parameter used to select responses. + users: nil, + # The type of run data source. Always `responses`. 
+ type: :responses + ) + end + + sig do + override.returns( + { + type: Symbol, + created_after: T.nilable(Integer), + created_before: T.nilable(Integer), + instructions_search: T.nilable(String), + metadata: T.nilable(T.anything), + model: T.nilable(String), + reasoning_effort: + T.nilable(OpenAI::ReasoningEffort::TaggedSymbol), + temperature: T.nilable(Float), + tools: T.nilable(T::Array[String]), + top_p: T.nilable(Float), + users: T.nilable(T::Array[String]) + } + ) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Variants + ] + ) + end + def self.variants + end + end + + # Used when sampling from a model. Dictates the structure of the messages passed + # into the model. Can either be a reference to a prebuilt trajectory (ie, + # `item.input_trajectory`), or a template with variable references to the `item` + # namespace. + module InputMessages + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference + ) + end + + class Template < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template, + OpenAI::Internal::AnyHash + ) + end + + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + sig do + returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + attr_accessor :template + + # The type of input messages. Always `template`. 
+ sig { returns(Symbol) } + attr_accessor :type + + sig do + params( + template: + T::Array[ + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::OrHash + ) + ], + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A list of chat messages forming the prompt or context. May include variable + # references to the `item` namespace, ie {{item.name}}. + template:, + # The type of input messages. Always `template`. + type: :template + ) + end + + sig do + override.returns( + { + template: + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ], + type: Symbol + } + ) + end + def to_hash + end + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + module Template + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + ) + end + + class ChatMessage < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage, + OpenAI::Internal::AnyHash + ) + end + + # The content of the message. + sig { returns(String) } + attr_accessor :content + + # The role of the message (e.g. "system", "assistant", "user"). 
+ sig { returns(String) } + attr_accessor :role + + sig do + params(content: String, role: String).returns( + T.attached_class + ) + end + def self.new( + # The content of the message. + content:, + # The role of the message (e.g. "system", "assistant", "user"). + role: + ) + end + + sig { override.returns({ content: String, role: String }) } + def to_hash + end + end + + class EvalItem < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. 
+ sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText::OrHash + ), + role: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::OrSymbol, + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants, + role: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol, + type: + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. 
+ sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Role::TaggedSymbol + ] + ) + end + def 
self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::Variants + ] + ) + end + def self.variants + end + end + end + + class ItemReference < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference, + OpenAI::Internal::AnyHash + ) + end + + # A reference to a variable in the `item` namespace. Ie, "item.name" + sig { returns(String) } + attr_accessor :item_reference + + # The type of input messages. Always `item_reference`. + sig { returns(Symbol) } + attr_accessor :type + + sig do + params(item_reference: String, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # A reference to a variable in the `item` namespace. Ie, "item.name" + item_reference:, + # The type of input messages. Always `item_reference`. 
+ type: :item_reference + ) + end + + sig do + override.returns({ item_reference: String, type: Symbol }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Variants + ] + ) + end + def self.variants + end + end + + class SamplingParams < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams, + OpenAI::Internal::AnyHash + ) + end + + # The maximum number of tokens in the generated output. + sig { returns(T.nilable(Integer)) } + attr_reader :max_completion_tokens + + sig { params(max_completion_tokens: Integer).void } + attr_writer :max_completion_tokens + + # A seed value to initialize the randomness, during sampling. + sig { returns(T.nilable(Integer)) } + attr_reader :seed + + sig { params(seed: Integer).void } + attr_writer :seed + + # A higher temperature increases randomness in the outputs. + sig { returns(T.nilable(Float)) } + attr_reader :temperature + + sig { params(temperature: Float).void } + attr_writer :temperature + + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. + sig { returns(T.nilable(Float)) } + attr_reader :top_p + + sig { params(top_p: Float).void } + attr_writer :top_p + + sig do + params( + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + ).returns(T.attached_class) + end + def self.new( + # The maximum number of tokens in the generated output. + max_completion_tokens: nil, + # A seed value to initialize the randomness, during sampling. + seed: nil, + # A higher temperature increases randomness in the outputs. + temperature: nil, + # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
+ top_p: nil ) + end + + sig do + override.returns( + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + ) + end + def to_hash + end end + end sig do override.returns( diff --git a/rbi/openai/models/file_create_params.rbi b/rbi/openai/models/file_create_params.rbi index d3a1c945..493d915c 100644 --- a/rbi/openai/models/file_create_params.rbi +++ b/rbi/openai/models/file_create_params.rbi @@ -12,7 +12,7 @@ module OpenAI end # The File object (not file name) to be uploaded. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :file # The intended purpose of the uploaded file. One of: - `assistants`: Used in the @@ -24,7 +24,7 @@ module OpenAI sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) @@ -44,7 +44,7 @@ module OpenAI sig do override.returns( { - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions } diff --git a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi index 1c5d0dbd..bca68bdb 100644 --- a/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi +++ b/rbi/openai/models/fine_tuning/alpha/grader_run_params.rbi @@ -36,7 +36,9 @@ module OpenAI # The reference answer for the evaluation. 
sig do - returns(T.any(String, T.anything, T::Array[T.anything], Float)) + returns( + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants + ) end attr_accessor :reference_answer @@ -52,7 +54,7 @@ module OpenAI ), model_sample: String, reference_answer: - T.any(String, T.anything, T::Array[T.anything], Float), + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -80,7 +82,7 @@ module OpenAI ), model_sample: String, reference_answer: - T.any(String, T.anything, T::Array[T.anything], Float), + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi index 138bf221..20651675 100644 --- a/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi +++ b/rbi/openai/models/fine_tuning/alpha/grader_validate_response.rbi @@ -17,13 +17,7 @@ module OpenAI sig do returns( T.nilable( - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) + OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader::Variants ) ) end @@ -65,13 +59,7 @@ module OpenAI override.returns( { grader: - T.any( - OpenAI::Graders::StringCheckGrader, - OpenAI::Graders::TextSimilarityGrader, - OpenAI::Graders::PythonGrader, - OpenAI::Graders::ScoreModelGrader, - OpenAI::Graders::MultiGrader - ) + OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::Grader::Variants } ) end diff --git a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi index e43f6cd0..8e4d7a3a 100644 --- a/rbi/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/openai/models/fine_tuning/fine_tuning_job.rbi @@ -318,12 
+318,24 @@ module OpenAI # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. - sig { returns(T.nilable(T.any(T.anything, Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize::Variants + ) + ) + end attr_accessor :batch_size # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. - sig { returns(T.nilable(T.any(Symbol, Float))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants + ) + ) + end attr_reader :learning_rate_multiplier sig { params(learning_rate_multiplier: T.any(Symbol, Float)).void } @@ -331,7 +343,13 @@ module OpenAI # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. - sig { returns(T.nilable(T.any(Symbol, Integer))) } + sig do + returns( + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants + ) + ) + end attr_reader :n_epochs sig { params(n_epochs: T.any(Symbol, Integer)).void } @@ -362,9 +380,14 @@ module OpenAI sig do override.returns( { - batch_size: T.nilable(T.any(T.anything, Symbol, Integer)), - learning_rate_multiplier: T.any(Symbol, Float), - n_epochs: T.any(Symbol, Integer) + batch_size: + T.nilable( + OpenAI::FineTuning::FineTuningJob::Hyperparameters::BatchSize::Variants + ), + learning_rate_multiplier: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier::Variants, + n_epochs: + OpenAI::FineTuning::FineTuningJob::Hyperparameters::NEpochs::Variants } ) end diff --git a/rbi/openai/models/graders/label_model_grader.rbi b/rbi/openai/models/graders/label_model_grader.rbi index 42632db8..9d062b87 100644 --- a/rbi/openai/models/graders/label_model_grader.rbi +++ b/rbi/openai/models/graders/label_model_grader.rbi @@ -11,7 +11,7 @@ module OpenAI 
T.any(OpenAI::Graders::LabelModelGrader, OpenAI::Internal::AnyHash) end - sig { returns(T::Array[OpenAI::EvalItem]) } + sig { returns(T::Array[OpenAI::Graders::LabelModelGrader::Input]) } attr_accessor :input # The labels to assign to each item in the evaluation. @@ -38,7 +38,7 @@ module OpenAI # the evaluation. sig do params( - input: T::Array[OpenAI::EvalItem::OrHash], + input: T::Array[OpenAI::Graders::LabelModelGrader::Input::OrHash], labels: T::Array[String], model: String, name: String, @@ -64,7 +64,7 @@ module OpenAI sig do override.returns( { - input: T::Array[OpenAI::EvalItem], + input: T::Array[OpenAI::Graders::LabelModelGrader::Input], labels: T::Array[String], model: String, name: String, @@ -75,6 +75,225 @@ module OpenAI end def to_hash end + + class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader::Input, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns(OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol) + end + attr_accessor :role + + # The type of the message input. Always `message`. + sig do + returns( + T.nilable( + OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. 
Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText::OrHash + ), + role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ), + role: OpenAI::Graders::LabelModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::LabelModelGrader::Input::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. + module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::LabelModelGrader::Input::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. 
+ type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::LabelModelGrader::Input::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::LabelModelGrader::Input::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/graders/multi_grader.rbi b/rbi/openai/models/graders/multi_grader.rbi index bed184f8..bbf5c142 100644 --- a/rbi/openai/models/graders/multi_grader.rbi +++ b/rbi/openai/models/graders/multi_grader.rbi @@ -35,7 +35,7 @@ module OpenAI sig { returns(String) } attr_accessor :name - # The type of grader. 
+ # The object type, which is always `multi`. sig { returns(Symbol) } attr_accessor :type @@ -65,7 +65,7 @@ module OpenAI graders:, # The name of the grader. name:, - # The type of grader. + # The object type, which is always `multi`. type: :multi ) end diff --git a/rbi/openai/models/graders/score_model_grader.rbi b/rbi/openai/models/graders/score_model_grader.rbi index d17b745c..ab602eda 100644 --- a/rbi/openai/models/graders/score_model_grader.rbi +++ b/rbi/openai/models/graders/score_model_grader.rbi @@ -12,7 +12,7 @@ module OpenAI end # The input text. This may include template strings. - sig { returns(T::Array[OpenAI::EvalItem]) } + sig { returns(T::Array[OpenAI::Graders::ScoreModelGrader::Input]) } attr_accessor :input # The model to use for the evaluation. @@ -44,7 +44,7 @@ module OpenAI # A ScoreModelGrader object that uses a model to assign a score to the input. sig do params( - input: T::Array[OpenAI::EvalItem::OrHash], + input: T::Array[OpenAI::Graders::ScoreModelGrader::Input::OrHash], model: String, name: String, range: T::Array[Float], @@ -71,7 +71,7 @@ module OpenAI sig do override.returns( { - input: T::Array[OpenAI::EvalItem], + input: T::Array[OpenAI::Graders::ScoreModelGrader::Input], model: String, name: String, type: Symbol, @@ -82,6 +82,225 @@ module OpenAI end def to_hash end + + class Input < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::ScoreModelGrader::Input, + OpenAI::Internal::AnyHash + ) + end + + # Text inputs to the model - can contain template strings. + sig do + returns( + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + ) + end + attr_accessor :content + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + sig do + returns(OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol) + end + attr_accessor :role + + # The type of the message input. Always `message`. 
+ sig do + returns( + T.nilable( + OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ) + ) + end + attr_reader :type + + sig do + params( + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ).void + end + attr_writer :type + + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + sig do + params( + content: + T.any( + String, + OpenAI::Responses::ResponseInputText::OrHash, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText::OrHash + ), + role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + ).returns(T.attached_class) + end + def self.new( + # Text inputs to the model - can contain template strings. + content:, + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. + role:, + # The type of the message input. Always `message`. + type: nil + ) + end + + sig do + override.returns( + { + content: + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ), + role: OpenAI::Graders::ScoreModelGrader::Input::Role::OrSymbol, + type: OpenAI::Graders::ScoreModelGrader::Input::Type::OrSymbol + } + ) + end + def to_hash + end + + # Text inputs to the model - can contain template strings. 
+ module Content + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::ResponseInputText, + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + ) + end + + class OutputText < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText, + OpenAI::Internal::AnyHash + ) + end + + # The text output from the model. + sig { returns(String) } + attr_accessor :text + + # The type of the output text. Always `output_text`. + sig { returns(Symbol) } + attr_accessor :type + + # A text output from the model. + sig do + params(text: String, type: Symbol).returns(T.attached_class) + end + def self.new( + # The text output from the model. + text:, + # The type of the output text. Always `output_text`. + type: :output_text + ) + end + + sig { override.returns({ text: String, type: Symbol }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Content::Variants + ] + ) + end + def self.variants + end + end + + # The role of the message input. One of `user`, `assistant`, `system`, or + # `developer`. 
+ module Role + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Role) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + USER = + T.let( + :user, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + ASSISTANT = + T.let( + :assistant, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + SYSTEM = + T.let( + :system, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + DEVELOPER = + T.let( + :developer, + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Role::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The type of the message input. Always `message`. + module Type + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Graders::ScoreModelGrader::Input::Type) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + MESSAGE = + T.let( + :message, + OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Graders::ScoreModelGrader::Input::Type::TaggedSymbol + ] + ) + end + def self.values + end + end + end end end end diff --git a/rbi/openai/models/image_create_variation_params.rbi b/rbi/openai/models/image_create_variation_params.rbi index a6be588e..1c53b22b 100644 --- a/rbi/openai/models/image_create_variation_params.rbi +++ b/rbi/openai/models/image_create_variation_params.rbi @@ -13,7 +13,7 @@ module OpenAI # The image to use as the basis for the variation(s). Must be a valid PNG file, # less than 4MB, and square. - sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :image # The model to use for image generation. 
Only `dall-e-2` is supported at this @@ -55,7 +55,7 @@ module OpenAI sig do params( - image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + image: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: @@ -94,7 +94,7 @@ module OpenAI sig do override.returns( { - image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + image: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: diff --git a/rbi/openai/models/image_edit_params.rbi b/rbi/openai/models/image_edit_params.rbi index 7225f73f..b03c85dd 100644 --- a/rbi/openai/models/image_edit_params.rbi +++ b/rbi/openai/models/image_edit_params.rbi @@ -18,17 +18,7 @@ module OpenAI # # For `dall-e-2`, you can only provide one image, and it should be a square `png` # file less than 4MB. - sig do - returns( - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ) - ) - end + sig { returns(OpenAI::ImageEditParams::Image::Variants) } attr_accessor :image # A text description of the desired image(s). The maximum length is 1000 @@ -50,12 +40,10 @@ module OpenAI # indicate where `image` should be edited. If there are multiple images provided, # the mask will be applied on the first image. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. - sig do - returns(T.nilable(T.any(Pathname, StringIO, IO, OpenAI::FilePart))) - end + sig { returns(T.nilable(OpenAI::Internal::FileInput)) } attr_reader :mask - sig { params(mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart)).void } + sig { params(mask: OpenAI::Internal::FileInput).void } attr_writer :mask # The model to use for image generation. 
Only `dall-e-2` and `gpt-image-1` are @@ -100,17 +88,10 @@ module OpenAI sig do params( - image: - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), - mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), @@ -176,18 +157,11 @@ module OpenAI sig do override.returns( { - image: - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), - mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), diff --git a/rbi/openai/models/moderation_create_params.rbi b/rbi/openai/models/moderation_create_params.rbi index 1f2a76f8..0d5c60fa 100644 --- a/rbi/openai/models/moderation_create_params.rbi +++ b/rbi/openai/models/moderation_create_params.rbi @@ -13,20 +13,7 @@ module OpenAI # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. - sig do - returns( - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput, - OpenAI::ModerationTextInput - ) - ] - ) - ) - end + sig { returns(OpenAI::ModerationCreateParams::Input::Variants) } attr_accessor :input # The content moderation model you would like to use. 
Learn more in @@ -45,17 +32,7 @@ module OpenAI sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput::OrHash, - OpenAI::ModerationTextInput::OrHash - ) - ] - ), + input: OpenAI::ModerationCreateParams::Input::Variants, model: T.any(String, OpenAI::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) @@ -76,17 +53,7 @@ module OpenAI sig do override.returns( { - input: - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput, - OpenAI::ModerationTextInput - ) - ] - ), + input: OpenAI::ModerationCreateParams::Input::Variants, model: T.any(String, OpenAI::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions } @@ -105,12 +72,7 @@ module OpenAI T.any( String, T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput, - OpenAI::ModerationTextInput - ) - ] + T::Array[OpenAI::ModerationMultiModalInput::Variants] ) end diff --git a/rbi/openai/models/response_format_json_schema.rbi b/rbi/openai/models/response_format_json_schema.rbi index 8155e9c9..e9796e15 100644 --- a/rbi/openai/models/response_format_json_schema.rbi +++ b/rbi/openai/models/response_format_json_schema.rbi @@ -9,12 +9,23 @@ module OpenAI end # Structured Outputs configuration options, including a JSON Schema. - sig { returns(OpenAI::ResponseFormatJSONSchema::JSONSchema) } + sig do + returns( + T.any( + OpenAI::ResponseFormatJSONSchema::JSONSchema::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter + ) + ) + end attr_reader :json_schema sig do params( - json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema::OrHash + json_schema: + T.any( + OpenAI::ResponseFormatJSONSchema::JSONSchema::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter + ) ).void end attr_writer :json_schema @@ -28,7 +39,11 @@ module OpenAI # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
sig do params( - json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema::OrHash, + json_schema: + T.any( + OpenAI::ResponseFormatJSONSchema::JSONSchema::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter + ), type: Symbol ).returns(T.attached_class) end diff --git a/rbi/openai/models/responses/easy_input_message.rbi b/rbi/openai/models/responses/easy_input_message.rbi index f4f1e6e9..10935954 100644 --- a/rbi/openai/models/responses/easy_input_message.rbi +++ b/rbi/openai/models/responses/easy_input_message.rbi @@ -14,20 +14,7 @@ module OpenAI # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. - sig do - returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ) - ) - end + sig { returns(OpenAI::Responses::EasyInputMessage::Content::Variants) } attr_accessor :content # The role of the message input. One of `user`, `assistant`, `system`, or @@ -55,17 +42,7 @@ module OpenAI # interactions. 
sig do params( - content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText::OrHash, - OpenAI::Responses::ResponseInputImage::OrHash, - OpenAI::Responses::ResponseInputFile::OrHash - ) - ] - ), + content: OpenAI::Responses::EasyInputMessage::Content::Variants, role: OpenAI::Responses::EasyInputMessage::Role::OrSymbol, type: OpenAI::Responses::EasyInputMessage::Type::OrSymbol ).returns(T.attached_class) @@ -85,17 +62,7 @@ module OpenAI sig do override.returns( { - content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ), + content: OpenAI::Responses::EasyInputMessage::Content::Variants, role: OpenAI::Responses::EasyInputMessage::Role::OrSymbol, type: OpenAI::Responses::EasyInputMessage::Type::OrSymbol } @@ -113,13 +80,7 @@ module OpenAI T.type_alias do T.any( String, - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] + T::Array[OpenAI::Responses::ResponseInputContent::Variants] ) end diff --git a/rbi/openai/models/responses/input_item_list_params.rbi b/rbi/openai/models/responses/input_item_list_params.rbi index 403928db..f495c4f3 100644 --- a/rbi/openai/models/responses/input_item_list_params.rbi +++ b/rbi/openai/models/responses/input_item_list_params.rbi @@ -53,7 +53,7 @@ module OpenAI sig { params(limit: Integer).void } attr_writer :limit - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. @@ -92,7 +92,7 @@ module OpenAI # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. limit: nil, - # The order to return the input items in. Default is `asc`. 
+ # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. @@ -117,7 +117,7 @@ module OpenAI def to_hash end - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. diff --git a/rbi/openai/models/responses/response.rbi b/rbi/openai/models/responses/response.rbi index fcc7367f..97408461 100644 --- a/rbi/openai/models/responses/response.rbi +++ b/rbi/openai/models/responses/response.rbi @@ -65,15 +65,7 @@ module OpenAI # and price points. Refer to the # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. - sig do - returns( - T.any( - String, - OpenAI::ChatModel::TaggedSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol - ) - ) - end + sig { returns(OpenAI::ResponsesModel::Variants) } attr_accessor :model # The object type of this resource - always set to `response`. @@ -88,18 +80,7 @@ module OpenAI # an `assistant` message with the content generated by the model, you might # consider using the `output_text` property where supported in SDKs. sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ] - ) + returns(T::Array[OpenAI::Responses::ResponseOutputItem::Variants]) end attr_accessor :output @@ -117,15 +98,7 @@ module OpenAI # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. 
- sig do - returns( - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ) - ) - end + sig { returns(OpenAI::Responses::Response::ToolChoice::Variants) } attr_accessor :tool_choice # An array of tools the model may call while generating a response. You can @@ -142,18 +115,7 @@ module OpenAI # - **Function calls (custom tools)**: Functions that are defined by you, enabling # the model to call your own code. Learn more about # [function calling](https://platform.openai.com/docs/guides/function-calling). - sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::FileSearchTool, - OpenAI::Responses::FunctionTool, - OpenAI::Responses::ComputerTool, - OpenAI::Responses::WebSearchTool - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Responses::Tool::Variants]) } attr_accessor :tools # An alternative to sampling with temperature, called nucleus sampling, where the @@ -164,6 +126,11 @@ module OpenAI sig { returns(T.nilable(Float)) } attr_accessor :top_p + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :background + # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). @@ -211,7 +178,7 @@ module OpenAI attr_accessor :service_tier # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. sig do returns(T.nilable(OpenAI::Responses::ResponseStatus::TaggedSymbol)) end @@ -253,8 +220,8 @@ module OpenAI sig { params(usage: OpenAI::Responses::ResponseUsage::OrHash).void } attr_writer :usage - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. 
+ # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). sig { returns(T.nilable(String)) } attr_reader :user @@ -285,7 +252,13 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ) ], parallel_tool_calls: T::Boolean, @@ -299,13 +272,18 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], top_p: T.nilable(Float), + background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Reasoning::OrHash), @@ -389,6 +367,9 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). 
+ background: nil, # An upper bound for the number of tokens that can be generated for a response, # including visible output tokens and # [reasoning tokens](https://platform.openai.com/docs/guides/reasoning). @@ -421,7 +402,7 @@ module OpenAI # utilized. service_tier: nil, # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. status: nil, # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: @@ -440,8 +421,8 @@ module OpenAI # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. usage: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # The object type of this resource - always set to `response`. 
@@ -459,42 +440,15 @@ module OpenAI T.nilable(OpenAI::Responses::Response::IncompleteDetails), instructions: T.nilable(String), metadata: T.nilable(T::Hash[Symbol, String]), - model: - T.any( - String, - OpenAI::ChatModel::TaggedSymbol, - OpenAI::ResponsesModel::ResponsesOnlyModel::TaggedSymbol - ), + model: OpenAI::ResponsesModel::Variants, object: Symbol, - output: - T::Array[ - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ], + output: T::Array[OpenAI::Responses::ResponseOutputItem::Variants], parallel_tool_calls: T::Boolean, temperature: T.nilable(Float), - tool_choice: - T.any( - OpenAI::Responses::ToolChoiceOptions::TaggedSymbol, - OpenAI::Responses::ToolChoiceTypes, - OpenAI::Responses::ToolChoiceFunction - ), - tools: - T::Array[ - T.any( - OpenAI::Responses::FileSearchTool, - OpenAI::Responses::FunctionTool, - OpenAI::Responses::ComputerTool, - OpenAI::Responses::WebSearchTool - ) - ], + tool_choice: OpenAI::Responses::Response::ToolChoice::Variants, + tools: T::Array[OpenAI::Responses::Tool::Variants], top_p: T.nilable(Float), + background: T.nilable(T::Boolean), max_output_tokens: T.nilable(Integer), previous_response_id: T.nilable(String), reasoning: T.nilable(OpenAI::Reasoning), diff --git a/rbi/openai/models/responses/response_audio_delta_event.rbi b/rbi/openai/models/responses/response_audio_delta_event.rbi index 87c6eff6..fbc13de2 100644 --- a/rbi/openai/models/responses/response_audio_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_delta_event.rbi @@ -16,21 +16,35 @@ module OpenAI sig { returns(String) } attr_accessor :delta + # A sequence number for this chunk of the stream response. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. 
Always `response.audio.delta`. sig { returns(Symbol) } attr_accessor :type # Emitted when there is a partial audio response. - sig { params(delta: String, type: Symbol).returns(T.attached_class) } + sig do + params(delta: String, sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # A chunk of Base64 encoded response audio bytes. delta:, + # A sequence number for this chunk of the stream response. + sequence_number:, # The type of the event. Always `response.audio.delta`. type: :"response.audio.delta" ) end - sig { override.returns({ delta: String, type: Symbol }) } + sig do + override.returns( + { delta: String, sequence_number: Integer, type: Symbol } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_audio_done_event.rbi b/rbi/openai/models/responses/response_audio_done_event.rbi index d9a86980..20be4298 100644 --- a/rbi/openai/models/responses/response_audio_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_done_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of the delta. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.audio.done`. sig { returns(Symbol) } attr_accessor :type # Emitted when the audio response is complete. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of the delta. + sequence_number:, # The type of the event. Always `response.audio.done`. 
type: :"response.audio.done" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi index 74e69b9b..ef75b6be 100644 --- a/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_delta_event.rbi @@ -16,21 +16,35 @@ module OpenAI sig { returns(String) } attr_accessor :delta + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.audio.transcript.delta`. sig { returns(Symbol) } attr_accessor :type # Emitted when there is a partial transcript of audio. - sig { params(delta: String, type: Symbol).returns(T.attached_class) } + sig do + params(delta: String, sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( # The partial transcript of the audio response. delta:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.audio.transcript.delta`. type: :"response.audio.transcript.delta" ) end - sig { override.returns({ delta: String, type: Symbol }) } + sig do + override.returns( + { delta: String, sequence_number: Integer, type: Symbol } + ) + end def to_hash end end diff --git a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi index d0d670a2..f6a29e91 100644 --- a/rbi/openai/models/responses/response_audio_transcript_done_event.rbi +++ b/rbi/openai/models/responses/response_audio_transcript_done_event.rbi @@ -12,19 +12,29 @@ module OpenAI ) end + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. 
Always `response.audio.transcript.done`. sig { returns(Symbol) } attr_accessor :type # Emitted when the full audio transcript is completed. - sig { params(type: Symbol).returns(T.attached_class) } + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end def self.new( + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.audio.transcript.done`. type: :"response.audio.transcript.done" ) end - sig { override.returns({ type: Symbol }) } + sig { override.returns({ sequence_number: Integer, type: Symbol }) } def to_hash end end diff --git a/rbi/openai/models/responses/response_cancel_params.rbi b/rbi/openai/models/responses/response_cancel_params.rbi new file mode 100644 index 00000000..296eaaab --- /dev/null +++ b/rbi/openai/models/responses/response_cancel_params.rbi @@ -0,0 +1,32 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseCancelParams, + OpenAI::Internal::AnyHash + ) + end + + sig do + params(request_options: OpenAI::RequestOptions::OrHash).returns( + T.attached_class + ) + end + def self.new(request_options: {}) + end + + sig { override.returns({ request_options: OpenAI::RequestOptions }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi index 220e24ca..a26838ef 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_delta_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence 
number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.code.delta`. sig { returns(Symbol) } attr_accessor :type # Emitted when a partial code snippet is added by the code interpreter. sig do - params(delta: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + delta: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The partial code snippet added by the code interpreter. delta:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.code.delta`. type: :"response.code_interpreter_call.code.delta" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { delta: String, output_index: Integer, type: Symbol } + { + delta: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi index 43a90d5f..fdd8c46e 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_code_done_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.code.done`. sig { returns(Symbol) } attr_accessor :type # Emitted when code snippet output is finalized by the code interpreter. 
sig do - params(code: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + code: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The final code snippet output by the code interpreter. code:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.code.done`. type: :"response.code_interpreter_call.code.done" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { code: String, output_index: Integer, type: Symbol } + { + code: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi index be772f72..5e40e50d 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_completed_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.completed`. sig { returns(Symbol) } attr_accessor :type @@ -38,6 +42,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -46,6 +51,8 @@ module OpenAI code_interpreter_call:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.completed`. 
type: :"response.code_interpreter_call.completed" ) @@ -57,6 +64,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi index 578fdb15..bf880f76 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_in_progress_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.in_progress`. sig { returns(Symbol) } attr_accessor :type @@ -38,6 +42,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -46,6 +51,8 @@ module OpenAI code_interpreter_call:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.in_progress`. 
type: :"response.code_interpreter_call.in_progress" ) @@ -57,6 +64,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi index bdf1313a..eb135de8 100644 --- a/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_call_interpreting_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.code_interpreter_call.interpreting`. sig { returns(Symbol) } attr_accessor :type @@ -38,6 +42,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -46,6 +51,8 @@ module OpenAI code_interpreter_call:, # The index of the output item that the code interpreter call is in progress. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.code_interpreter_call.interpreting`. 
type: :"response.code_interpreter_call.interpreting" ) @@ -57,6 +64,7 @@ module OpenAI code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi index 6d787089..95089eaa 100644 --- a/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -36,7 +36,7 @@ module OpenAI # The status of the code interpreter tool call. sig do returns( - OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol ) end attr_accessor :status @@ -45,6 +45,13 @@ module OpenAI sig { returns(Symbol) } attr_accessor :type + # The ID of the container used to run the code. + sig { returns(T.nilable(String)) } + attr_reader :container_id + + sig { params(container_id: String).void } + attr_writer :container_id + # A tool call to run code. sig do params( @@ -59,6 +66,7 @@ module OpenAI ], status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, + container_id: String, type: Symbol ).returns(T.attached_class) end @@ -71,6 +79,8 @@ module OpenAI results:, # The status of the code interpreter tool call. status:, + # The ID of the container used to run the code. + container_id: nil, # The type of the code interpreter tool call. Always `code_interpreter_call`. type: :code_interpreter_call ) @@ -89,15 +99,16 @@ module OpenAI ) ], status: - OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol, - type: Symbol + OpenAI::Responses::ResponseCodeInterpreterToolCall::Status::OrSymbol, + type: Symbol, + container_id: String } ) end def to_hash end - # The output of a code interpreter tool call that is text. + # The output of a code interpreter tool. 
module Result extend OpenAI::Internal::Type::Union diff --git a/rbi/openai/models/responses/response_completed_event.rbi b/rbi/openai/models/responses/response_completed_event.rbi index 18dd991a..a583bacf 100644 --- a/rbi/openai/models/responses/response_completed_event.rbi +++ b/rbi/openai/models/responses/response_completed_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.completed`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # Properties of the completed response. response:, + # The sequence number for this event. + sequence_number:, # The type of the event. Always `response.completed`. type: :"response.completed" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_content_part_added_event.rbi b/rbi/openai/models/responses/response_content_part_added_event.rbi index 3cc852cc..82ef446c 100644 --- a/rbi/openai/models/responses/response_content_part_added_event.rbi +++ b/rbi/openai/models/responses/response_content_part_added_event.rbi @@ -27,14 +27,15 @@ module OpenAI # The content part that was added. sig do returns( - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ) + OpenAI::Responses::ResponseContentPartAddedEvent::Part::Variants ) end attr_accessor :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. 
Always `response.content_part.added`. sig { returns(Symbol) } attr_accessor :type @@ -50,6 +51,7 @@ module OpenAI OpenAI::Responses::ResponseOutputText::OrHash, OpenAI::Responses::ResponseOutputRefusal::OrHash ), + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -62,6 +64,8 @@ module OpenAI output_index:, # The content part that was added. part:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.content_part.added`. type: :"response.content_part.added" ) @@ -74,10 +78,8 @@ module OpenAI item_id: String, output_index: Integer, part: - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ), + OpenAI::Responses::ResponseContentPartAddedEvent::Part::Variants, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_content_part_done_event.rbi b/rbi/openai/models/responses/response_content_part_done_event.rbi index 6bf2b951..51be81c7 100644 --- a/rbi/openai/models/responses/response_content_part_done_event.rbi +++ b/rbi/openai/models/responses/response_content_part_done_event.rbi @@ -27,14 +27,15 @@ module OpenAI # The content part that is done. sig do returns( - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ) + OpenAI::Responses::ResponseContentPartDoneEvent::Part::Variants ) end attr_accessor :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.content_part.done`. sig { returns(Symbol) } attr_accessor :type @@ -50,6 +51,7 @@ module OpenAI OpenAI::Responses::ResponseOutputText::OrHash, OpenAI::Responses::ResponseOutputRefusal::OrHash ), + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -62,6 +64,8 @@ module OpenAI output_index:, # The content part that is done. part:, + # The sequence number of this event. + sequence_number:, # The type of the event. 
Always `response.content_part.done`. type: :"response.content_part.done" ) @@ -74,10 +78,8 @@ module OpenAI item_id: String, output_index: Integer, part: - T.any( - OpenAI::Responses::ResponseOutputText, - OpenAI::Responses::ResponseOutputRefusal - ), + OpenAI::Responses::ResponseContentPartDoneEvent::Part::Variants, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_create_params.rbi b/rbi/openai/models/responses/response_create_params.rbi index 0e582266..592d684d 100644 --- a/rbi/openai/models/responses/response_create_params.rbi +++ b/rbi/openai/models/responses/response_create_params.rbi @@ -25,26 +25,7 @@ module OpenAI # - [Conversation state](https://platform.openai.com/docs/guides/conversation-state) # - [Function calling](https://platform.openai.com/docs/guides/function-calling) sig do - returns( - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] - ) - ) + returns(OpenAI::Responses::ResponseCreateParams::Input::Variants) end attr_accessor :input @@ -64,6 +45,11 @@ module OpenAI end attr_accessor :model + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + sig { returns(T.nilable(T::Boolean)) } + attr_accessor :background + # Specify additional output data to include in the model response. 
Currently # supported values are: # @@ -223,9 +209,13 @@ module OpenAI T.nilable( T::Array[ T.any( - OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool ) ] @@ -239,9 +229,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ] @@ -273,8 +267,8 @@ module OpenAI end attr_accessor :truncation - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
sig { returns(T.nilable(String)) } attr_reader :user @@ -284,31 +278,14 @@ module OpenAI sig do params( - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Responses::ResponseInputItem::Message::OrHash, - OpenAI::Responses::ResponseOutputMessage::OrHash, - OpenAI::Responses::ResponseFileSearchToolCall::OrHash, - OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, - OpenAI::Responses::ResponseFunctionWebSearch::OrHash, - OpenAI::Responses::ResponseFunctionToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash, - OpenAI::Responses::ResponseInputItem::ItemReference::OrHash - ) - ] - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, model: T.any( String, OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -335,9 +312,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -367,6 +348,9 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. model:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + background: nil, # Specify additional output data to include in the model response. 
Currently # supported values are: # @@ -474,8 +458,8 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, request_options: {} @@ -485,31 +469,14 @@ module OpenAI sig do override.returns( { - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, model: T.any( String, OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -536,9 +503,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool ) ], @@ -571,21 +542,7 @@ module OpenAI 
T.type_alias do T.any( String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage, - OpenAI::Responses::ResponseInputItem::Message, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput, - OpenAI::Responses::ResponseReasoningItem, - OpenAI::Responses::ResponseInputItem::ItemReference - ) - ] + T::Array[OpenAI::Responses::ResponseInputItem::Variants] ) end diff --git a/rbi/openai/models/responses/response_created_event.rbi b/rbi/openai/models/responses/response_created_event.rbi index ab66dcdd..ce2a5cff 100644 --- a/rbi/openai/models/responses/response_created_event.rbi +++ b/rbi/openai/models/responses/response_created_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.created`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that was created. response:, + # The sequence number for this event. + sequence_number:, # The type of the event. Always `response.created`. 
type: :"response.created" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_error_event.rbi b/rbi/openai/models/responses/response_error_event.rbi index 5c93890e..b651b4d8 100644 --- a/rbi/openai/models/responses/response_error_event.rbi +++ b/rbi/openai/models/responses/response_error_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(T.nilable(String)) } attr_accessor :param + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `error`. sig { returns(Symbol) } attr_accessor :type @@ -34,6 +38,7 @@ module OpenAI code: T.nilable(String), message: String, param: T.nilable(String), + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -44,6 +49,8 @@ module OpenAI message:, # The error parameter. param:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `error`. type: :error ) @@ -55,6 +62,7 @@ module OpenAI code: T.nilable(String), message: String, param: T.nilable(String), + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_failed_event.rbi b/rbi/openai/models/responses/response_failed_event.rbi index 8f8f2c19..8ac778a5 100644 --- a/rbi/openai/models/responses/response_failed_event.rbi +++ b/rbi/openai/models/responses/response_failed_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.failed`. 
sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that failed. response:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.failed`. type: :"response.failed" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi index 3b6a2331..dec63c3d 100644 --- a/rbi/openai/models/responses/response_file_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_completed_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.file_search_call.completed`. sig { returns(Symbol) } attr_accessor :type # Emitted when a file search call is completed (results found). sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the output item that the file search call is initiated. item_id:, # The index of the output item that the file search call is initiated. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.file_search_call.completed`. 
type: :"response.file_search_call.completed" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi index d93e58f0..e77a9b13 100644 --- a/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_in_progress_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.file_search_call.in_progress`. sig { returns(Symbol) } attr_accessor :type # Emitted when a file search call is initiated. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the output item that the file search call is initiated. item_id:, # The index of the output item that the file search call is initiated. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.file_search_call.in_progress`. 
type: :"response.file_search_call.in_progress" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi index 6fd13a21..5a9be332 100644 --- a/rbi/openai/models/responses/response_file_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_file_search_call_searching_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.file_search_call.searching`. sig { returns(Symbol) } attr_accessor :type # Emitted when a file search is currently searching. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # The ID of the output item that the file search call is initiated. item_id:, # The index of the output item that the file search call is searching. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.file_search_call.searching`. 
type: :"response.file_search_call.searching" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_file_search_tool_call.rbi b/rbi/openai/models/responses/response_file_search_tool_call.rbi index c2b95318..c6864d65 100644 --- a/rbi/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/openai/models/responses/response_file_search_tool_call.rbi @@ -163,7 +163,12 @@ module OpenAI # characters, booleans, or numbers. sig do returns( - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]) + T.nilable( + T::Hash[ + Symbol, + OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute::Variants + ] + ) ) end attr_accessor :attributes @@ -199,7 +204,12 @@ module OpenAI sig do params( attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute::Variants + ] + ), file_id: String, filename: String, score: Float, @@ -228,7 +238,12 @@ module OpenAI override.returns( { attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Responses::ResponseFileSearchToolCall::Result::Attribute::Variants + ] + ), file_id: String, filename: String, score: Float, diff --git a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi index 3e24f3b5..eb59905d 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_delta_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. 
+ sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.function_call_arguments.delta`. sig { returns(Symbol) } attr_accessor :type @@ -34,6 +38,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -44,6 +49,8 @@ module OpenAI item_id:, # The index of the output item that the function-call arguments delta is added to. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.function_call_arguments.delta`. type: :"response.function_call_arguments.delta" ) @@ -55,6 +62,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi index 5ec5cb31..dac56326 100644 --- a/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi +++ b/rbi/openai/models/responses/response_function_call_arguments_done_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + sig { returns(Symbol) } attr_accessor :type @@ -33,6 +37,7 @@ module OpenAI arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -43,6 +48,8 @@ module OpenAI item_id:, # The index of the output item. output_index:, + # The sequence number of this event. 
+ sequence_number:, type: :"response.function_call_arguments.done" ) end @@ -53,6 +60,7 @@ module OpenAI arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi b/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi new file mode 100644 index 00000000..176049a4 --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_completed_event.rbi @@ -0,0 +1,68 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallCompletedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.image_generation_call.completed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an image generation tool call has completed and the final image is + # available. + sig do + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.image_generation_call.completed'. 
+ type: :"response.image_generation_call.completed" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi b/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi new file mode 100644 index 00000000..ece2928d --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_generating_event.rbi @@ -0,0 +1,68 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallGeneratingEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of the image generation item being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.image_generation_call.generating'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an image generation tool call is actively generating an image + # (intermediate state). + sig do + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of the image generation item being processed. + sequence_number:, + # The type of the event. Always 'response.image_generation_call.generating'. 
+ type: :"response.image_generation_call.generating" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_image_gen_call_in_progress_event.rbi b/rbi/openai/models/responses/response_image_gen_call_in_progress_event.rbi new file mode 100644 index 00000000..7bf0d0e4 --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_in_progress_event.rbi @@ -0,0 +1,67 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallInProgressEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of the image generation item being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.image_generation_call.in_progress'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an image generation tool call is in progress. + sig do + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of the image generation item being processed. + sequence_number:, + # The type of the event. Always 'response.image_generation_call.in_progress'. 
+ type: :"response.image_generation_call.in_progress" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_image_gen_call_partial_image_event.rbi b/rbi/openai/models/responses/response_image_gen_call_partial_image_event.rbi new file mode 100644 index 00000000..b5f9909a --- /dev/null +++ b/rbi/openai/models/responses/response_image_gen_call_partial_image_event.rbi @@ -0,0 +1,85 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseImageGenCallPartialImageEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseImageGenCallPartialImageEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the image generation item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # Base64-encoded partial image data, suitable for rendering as an image. + sig { returns(String) } + attr_accessor :partial_image_b64 + + # 0-based index for the partial image (backend is 1-based, but this is 0-based for + # the user). + sig { returns(Integer) } + attr_accessor :partial_image_index + + # The sequence number of the image generation item being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.image_generation_call.partial_image'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a partial image is available during image generation streaming. 
+ sig do + params( + item_id: String, + output_index: Integer, + partial_image_b64: String, + partial_image_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the image generation item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # Base64-encoded partial image data, suitable for rendering as an image. + partial_image_b64:, + # 0-based index for the partial image (backend is 1-based, but this is 0-based for + # the user). + partial_image_index:, + # The sequence number of the image generation item being processed. + sequence_number:, + # The type of the event. Always 'response.image_generation_call.partial_image'. + type: :"response.image_generation_call.partial_image" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + partial_image_b64: String, + partial_image_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_in_progress_event.rbi b/rbi/openai/models/responses/response_in_progress_event.rbi index 3bb0ccb9..c3dd5e40 100644 --- a/rbi/openai/models/responses/response_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_in_progress_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.in_progress`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that is in progress. response:, + # The sequence number of this event. 
+ sequence_number:, # The type of the event. Always `response.in_progress`. type: :"response.in_progress" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_incomplete_event.rbi b/rbi/openai/models/responses/response_incomplete_event.rbi index a30d27a8..591c46e8 100644 --- a/rbi/openai/models/responses/response_incomplete_event.rbi +++ b/rbi/openai/models/responses/response_incomplete_event.rbi @@ -19,6 +19,10 @@ module OpenAI sig { params(response: OpenAI::Responses::Response::OrHash).void } attr_writer :response + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.incomplete`. sig { returns(Symbol) } attr_accessor :type @@ -27,12 +31,15 @@ module OpenAI sig do params( response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end def self.new( # The response that was incomplete. response:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.incomplete`. 
type: :"response.incomplete" ) @@ -40,7 +47,11 @@ module OpenAI sig do override.returns( - { response: OpenAI::Responses::Response, type: Symbol } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_input_item.rbi b/rbi/openai/models/responses/response_input_item.rbi index 2d59b644..bbafe9cb 100644 --- a/rbi/openai/models/responses/response_input_item.rbi +++ b/rbi/openai/models/responses/response_input_item.rbi @@ -24,6 +24,14 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Responses::ResponseInputItem::McpCall, OpenAI::Responses::ResponseInputItem::ItemReference ) end @@ -607,6 +615,791 @@ module OpenAI end end + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::ImageGenerationCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the image generation call. + sig { returns(String) } + attr_accessor :id + + # The generated image encoded in base64. + sig { returns(T.nilable(String)) } + attr_accessor :result + + # The status of the image generation call. + sig do + returns( + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::OrSymbol + ) + end + attr_accessor :status + + # The type of the image generation call. Always `image_generation_call`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # An image generation request made by the model. + sig do + params( + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the image generation call. + id:, + # The generated image encoded in base64. + result:, + # The status of the image generation call. + status:, + # The type of the image generation call. Always `image_generation_call`. + type: :image_generation_call + ) + end + + sig do + override.returns( + { + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The status of the image generation call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + GENERATING = + T.let( + :generating, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::ImageGenerationCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::LocalShellCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local 
shell call. + sig { returns(String) } + attr_accessor :id + + # Execute a shell command on the server. + sig do + returns( + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action + ) + end + attr_reader :action + + sig do + params( + action: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action::OrHash + ).void + end + attr_writer :action + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :call_id + + # The status of the local shell call. + sig do + returns( + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::OrSymbol + ) + end + attr_accessor :status + + # The type of the local shell call. Always `local_shell_call`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool call to run a command on the local shell. + sig do + params( + id: String, + action: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action::OrHash, + call_id: String, + status: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell call. + id:, + # Execute a shell command on the server. + action:, + # The unique ID of the local shell tool call generated by the model. + call_id:, + # The status of the local shell call. + status:, + # The type of the local shell call. Always `local_shell_call`. + type: :local_shell_call + ) + end + + sig do + override.returns( + { + id: String, + action: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::OrSymbol, + type: Symbol + } + ) + end + def to_hash + end + + class Action < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + OpenAI::Internal::AnyHash + ) + end + + # The command to run. 
+ sig { returns(T::Array[String]) } + attr_accessor :command + + # Environment variables to set for the command. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :env + + # The type of the local shell action. Always `exec`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional timeout in milliseconds for the command. + sig { returns(T.nilable(Integer)) } + attr_accessor :timeout_ms + + # Optional user to run the command as. + sig { returns(T.nilable(String)) } + attr_accessor :user + + # Optional working directory to run the command in. + sig { returns(T.nilable(String)) } + attr_accessor :working_directory + + # Execute a shell command on the server. + sig do + params( + command: T::Array[String], + env: T::Hash[Symbol, String], + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The command to run. + command:, + # Environment variables to set for the command. + env:, + # Optional timeout in milliseconds for the command. + timeout_ms: nil, + # Optional user to run the command as. + user: nil, + # Optional working directory to run the command in. + working_directory: nil, + # The type of the local shell action. Always `exec`. + type: :exec + ) + end + + sig do + override.returns( + { + command: T::Array[String], + env: T::Hash[Symbol, String], + type: Symbol, + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String) + } + ) + end + def to_hash + end + end + + # The status of the local shell call. 
+ module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::LocalShellCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the output of the local shell tool call. + sig { returns(String) } + attr_accessor :output + + # The type of the local shell tool call output. Always `local_shell_call_output`. + sig { returns(Symbol) } + attr_accessor :type + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::OrSymbol + ) + ) + end + attr_accessor :status + + # The output of a local shell tool call. + sig do + params( + id: String, + output: String, + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::OrSymbol + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell tool call generated by the model. + id:, + # A JSON string of the output of the local shell tool call. 
+ output:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + status: nil, + # The type of the local shell tool call output. Always `local_shell_call_output`. + type: :local_shell_call_output + ) + end + + sig do + override.returns( + { + id: String, + output: String, + type: Symbol, + status: + T.nilable( + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::OrSymbol + ) + } + ) + end + def to_hash + end + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseInputItem::LocalShellCallOutput::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpListTools, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the list. + sig { returns(String) } + attr_accessor :id + + # The label of the MCP server. + sig { returns(String) } + attr_accessor :server_label + + # The tools available on the server. + sig do + returns( + T::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] + ) + end + attr_accessor :tools + + # The type of the item. Always `mcp_list_tools`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Error message if the server could not list tools. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # A list of tools available on an MCP server. + sig do + params( + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseInputItem::McpListTools::Tool::OrHash + ], + error: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the list. + id:, + # The label of the MCP server. + server_label:, + # The tools available on the server. + tools:, + # Error message if the server could not list tools. + error: nil, + # The type of the item. Always `mcp_list_tools`. + type: :mcp_list_tools + ) + end + + sig do + override.returns( + { + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseInputItem::McpListTools::Tool + ], + type: Symbol, + error: T.nilable(String) + } + ) + end + def to_hash + end + + class Tool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpListTools::Tool, + OpenAI::Internal::AnyHash + ) + end + + # The JSON schema describing the tool's input. + sig { returns(T.anything) } + attr_accessor :input_schema + + # The name of the tool. + sig { returns(String) } + attr_accessor :name + + # Additional annotations about the tool. + sig { returns(T.nilable(T.anything)) } + attr_accessor :annotations + + # The description of the tool. + sig { returns(T.nilable(String)) } + attr_accessor :description + + # A tool available on an MCP server. + sig do + params( + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The JSON schema describing the tool's input. + input_schema:, + # The name of the tool. + name:, + # Additional annotations about the tool. + annotations: nil, + # The description of the tool. 
+ description: nil + ) + end + + sig do + override.returns( + { + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + } + ) + end + def to_hash + end + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpApprovalRequest, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval request. + sig { returns(String) } + attr_accessor :id + + # A JSON string of arguments for the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool to run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server making the request. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_approval_request`. + sig { returns(Symbol) } + attr_accessor :type + + # A request for human approval of a tool invocation. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval request. + id:, + # A JSON string of arguments for the tool. + arguments:, + # The name of the tool to run. + name:, + # The label of the MCP server making the request. + server_label:, + # The type of the item. Always `mcp_approval_request`. + type: :mcp_approval_request + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + } + ) + end + def to_hash + end + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpApprovalResponse, + OpenAI::Internal::AnyHash + ) + end + + # The ID of the approval request being answered. + sig { returns(String) } + attr_accessor :approval_request_id + + # Whether the request was approved. 
+ sig { returns(T::Boolean) } + attr_accessor :approve + + # The type of the item. Always `mcp_approval_response`. + sig { returns(Symbol) } + attr_accessor :type + + # The unique ID of the approval response + sig { returns(T.nilable(String)) } + attr_accessor :id + + # Optional reason for the decision. + sig { returns(T.nilable(String)) } + attr_accessor :reason + + # A response to an MCP approval request. + sig do + params( + approval_request_id: String, + approve: T::Boolean, + id: T.nilable(String), + reason: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The ID of the approval request being answered. + approval_request_id:, + # Whether the request was approved. + approve:, + # The unique ID of the approval response + id: nil, + # Optional reason for the decision. + reason: nil, + # The type of the item. Always `mcp_approval_response`. + type: :mcp_approval_response + ) + end + + sig do + override.returns( + { + approval_request_id: String, + approve: T::Boolean, + type: Symbol, + id: T.nilable(String), + reason: T.nilable(String) + } + ) + end + def to_hash + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseInputItem::McpCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the arguments passed to the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool that was run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server running the tool. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The error from the tool call, if any. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # The output from the tool call. 
+ sig { returns(T.nilable(String)) } + attr_accessor :output + + # An invocation of a tool on an MCP server. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + error: T.nilable(String), + output: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the tool call. + id:, + # A JSON string of the arguments passed to the tool. + arguments:, + # The name of the tool that was run. + name:, + # The label of the MCP server running the tool. + server_label:, + # The error from the tool call, if any. + error: nil, + # The output from the tool call. + output: nil, + # The type of the item. Always `mcp_call`. + type: :mcp_call + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol, + error: T.nilable(String), + output: T.nilable(String) + } + ) + end + def to_hash + end + end + class ItemReference < OpenAI::Internal::Type::BaseModel OrHash = T.type_alias do diff --git a/rbi/openai/models/responses/response_input_message_item.rbi b/rbi/openai/models/responses/response_input_message_item.rbi index c1190de8..f36d17e6 100644 --- a/rbi/openai/models/responses/response_input_message_item.rbi +++ b/rbi/openai/models/responses/response_input_message_item.rbi @@ -19,15 +19,7 @@ module OpenAI # A list of one or many input items to the model, containing different content # types. 
sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ] - ) + returns(T::Array[OpenAI::Responses::ResponseInputContent::Variants]) end attr_accessor :content @@ -113,13 +105,7 @@ module OpenAI { id: String, content: - T::Array[ - T.any( - OpenAI::Responses::ResponseInputText, - OpenAI::Responses::ResponseInputImage, - OpenAI::Responses::ResponseInputFile - ) - ], + T::Array[OpenAI::Responses::ResponseInputContent::Variants], role: OpenAI::Responses::ResponseInputMessageItem::Role::TaggedSymbol, status: diff --git a/rbi/openai/models/responses/response_item.rbi b/rbi/openai/models/responses/response_item.rbi index 021afb6a..9566555a 100644 --- a/rbi/openai/models/responses/response_item.rbi +++ b/rbi/openai/models/responses/response_item.rbi @@ -17,10 +17,798 @@ module OpenAI OpenAI::Responses::ResponseComputerToolCallOutputItem, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem + OpenAI::Responses::ResponseFunctionToolCallOutputItem, + OpenAI::Responses::ResponseItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseItem::LocalShellCall, + OpenAI::Responses::ResponseItem::LocalShellCallOutput, + OpenAI::Responses::ResponseItem::McpListTools, + OpenAI::Responses::ResponseItem::McpApprovalRequest, + OpenAI::Responses::ResponseItem::McpApprovalResponse, + OpenAI::Responses::ResponseItem::McpCall ) end + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::ImageGenerationCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the image generation call. + sig { returns(String) } + attr_accessor :id + + # The generated image encoded in base64. 
+ sig { returns(T.nilable(String)) } + attr_accessor :result + + # The status of the image generation call. + sig do + returns( + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the image generation call. Always `image_generation_call`. + sig { returns(Symbol) } + attr_accessor :type + + # An image generation request made by the model. + sig do + params( + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the image generation call. + id:, + # The generated image encoded in base64. + result:, + # The status of the image generation call. + status:, + # The type of the image generation call. Always `image_generation_call`. + type: :image_generation_call + ) + end + + sig do + override.returns( + { + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The status of the image generation call. 
+ module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + GENERATING = + T.let( + :generating, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseItem::ImageGenerationCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::LocalShellCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell call. + sig { returns(String) } + attr_accessor :id + + # Execute a shell command on the server. + sig do + returns(OpenAI::Responses::ResponseItem::LocalShellCall::Action) + end + attr_reader :action + + sig do + params( + action: + OpenAI::Responses::ResponseItem::LocalShellCall::Action::OrHash + ).void + end + attr_writer :action + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :call_id + + # The status of the local shell call. + sig do + returns( + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the local shell call. Always `local_shell_call`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool call to run a command on the local shell. 
+ sig do + params( + id: String, + action: + OpenAI::Responses::ResponseItem::LocalShellCall::Action::OrHash, + call_id: String, + status: + OpenAI::Responses::ResponseItem::LocalShellCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell call. + id:, + # Execute a shell command on the server. + action:, + # The unique ID of the local shell tool call generated by the model. + call_id:, + # The status of the local shell call. + status:, + # The type of the local shell call. Always `local_shell_call`. + type: :local_shell_call + ) + end + + sig do + override.returns( + { + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + class Action < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::LocalShellCall::Action, + OpenAI::Internal::AnyHash + ) + end + + # The command to run. + sig { returns(T::Array[String]) } + attr_accessor :command + + # Environment variables to set for the command. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :env + + # The type of the local shell action. Always `exec`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional timeout in milliseconds for the command. + sig { returns(T.nilable(Integer)) } + attr_accessor :timeout_ms + + # Optional user to run the command as. + sig { returns(T.nilable(String)) } + attr_accessor :user + + # Optional working directory to run the command in. + sig { returns(T.nilable(String)) } + attr_accessor :working_directory + + # Execute a shell command on the server. 
+ sig do + params( + command: T::Array[String], + env: T::Hash[Symbol, String], + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The command to run. + command:, + # Environment variables to set for the command. + env:, + # Optional timeout in milliseconds for the command. + timeout_ms: nil, + # Optional user to run the command as. + user: nil, + # Optional working directory to run the command in. + working_directory: nil, + # The type of the local shell action. Always `exec`. + type: :exec + ) + end + + sig do + override.returns( + { + command: T::Array[String], + env: T::Hash[Symbol, String], + type: Symbol, + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String) + } + ) + end + def to_hash + end + end + + # The status of the local shell call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseItem::LocalShellCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseItem::LocalShellCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::LocalShellCallOutput, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell tool call generated by the model. 
+ sig { returns(String) } + attr_accessor :id + + # A JSON string of the output of the local shell tool call. + sig { returns(String) } + attr_accessor :output + + # The type of the local shell tool call output. Always `local_shell_call_output`. + sig { returns(Symbol) } + attr_accessor :type + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + sig do + returns( + T.nilable( + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + ) + end + attr_accessor :status + + # The output of a local shell tool call. + sig do + params( + id: String, + output: String, + status: + T.nilable( + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::OrSymbol + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell tool call generated by the model. + id:, + # A JSON string of the output of the local shell tool call. + output:, + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. + status: nil, + # The type of the local shell tool call output. Always `local_shell_call_output`. + type: :local_shell_call_output + ) + end + + sig do + override.returns( + { + id: String, + output: String, + type: Symbol, + status: + T.nilable( + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + } + ) + end + def to_hash + end + + # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
+ module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpListTools, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the list. + sig { returns(String) } + attr_accessor :id + + # The label of the MCP server. + sig { returns(String) } + attr_accessor :server_label + + # The tools available on the server. + sig do + returns( + T::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool] + ) + end + attr_accessor :tools + + # The type of the item. Always `mcp_list_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Error message if the server could not list tools. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # A list of tools available on an MCP server. + sig do + params( + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseItem::McpListTools::Tool::OrHash + ], + error: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the list. + id:, + # The label of the MCP server. + server_label:, + # The tools available on the server. + tools:, + # Error message if the server could not list tools. 
+ error: nil, + # The type of the item. Always `mcp_list_tools`. + type: :mcp_list_tools + ) + end + + sig do + override.returns( + { + id: String, + server_label: String, + tools: + T::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + type: Symbol, + error: T.nilable(String) + } + ) + end + def to_hash + end + + class Tool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpListTools::Tool, + OpenAI::Internal::AnyHash + ) + end + + # The JSON schema describing the tool's input. + sig { returns(T.anything) } + attr_accessor :input_schema + + # The name of the tool. + sig { returns(String) } + attr_accessor :name + + # Additional annotations about the tool. + sig { returns(T.nilable(T.anything)) } + attr_accessor :annotations + + # The description of the tool. + sig { returns(T.nilable(String)) } + attr_accessor :description + + # A tool available on an MCP server. + sig do + params( + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The JSON schema describing the tool's input. + input_schema:, + # The name of the tool. + name:, + # Additional annotations about the tool. + annotations: nil, + # The description of the tool. + description: nil + ) + end + + sig do + override.returns( + { + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + } + ) + end + def to_hash + end + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpApprovalRequest, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval request. + sig { returns(String) } + attr_accessor :id + + # A JSON string of arguments for the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool to run. 
+ sig { returns(String) } + attr_accessor :name + + # The label of the MCP server making the request. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_approval_request`. + sig { returns(Symbol) } + attr_accessor :type + + # A request for human approval of a tool invocation. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval request. + id:, + # A JSON string of arguments for the tool. + arguments:, + # The name of the tool to run. + name:, + # The label of the MCP server making the request. + server_label:, + # The type of the item. Always `mcp_approval_request`. + type: :mcp_approval_request + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + } + ) + end + def to_hash + end + end + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpApprovalResponse, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval response + sig { returns(String) } + attr_accessor :id + + # The ID of the approval request being answered. + sig { returns(String) } + attr_accessor :approval_request_id + + # Whether the request was approved. + sig { returns(T::Boolean) } + attr_accessor :approve + + # The type of the item. Always `mcp_approval_response`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional reason for the decision. + sig { returns(T.nilable(String)) } + attr_accessor :reason + + # A response to an MCP approval request. 
+ sig do + params( + id: String, + approval_request_id: String, + approve: T::Boolean, + reason: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval response + id:, + # The ID of the approval request being answered. + approval_request_id:, + # Whether the request was approved. + approve:, + # Optional reason for the decision. + reason: nil, + # The type of the item. Always `mcp_approval_response`. + type: :mcp_approval_response + ) + end + + sig do + override.returns( + { + id: String, + approval_request_id: String, + approve: T::Boolean, + type: Symbol, + reason: T.nilable(String) + } + ) + end + def to_hash + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseItem::McpCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the arguments passed to the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool that was run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server running the tool. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The error from the tool call, if any. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # The output from the tool call. + sig { returns(T.nilable(String)) } + attr_accessor :output + + # An invocation of a tool on an MCP server. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + error: T.nilable(String), + output: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the tool call. + id:, + # A JSON string of the arguments passed to the tool. + arguments:, + # The name of the tool that was run. 
+ name:, + # The label of the MCP server running the tool. + server_label:, + # The error from the tool call, if any. + error: nil, + # The output from the tool call. + output: nil, + # The type of the item. Always `mcp_call`. + type: :mcp_call + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol, + error: T.nilable(String), + output: T.nilable(String) + } + ) + end + def to_hash + end + end + sig do override.returns(T::Array[OpenAI::Responses::ResponseItem::Variants]) end diff --git a/rbi/openai/models/responses/response_item_list.rbi b/rbi/openai/models/responses/response_item_list.rbi index b819f00a..4e645d9f 100644 --- a/rbi/openai/models/responses/response_item_list.rbi +++ b/rbi/openai/models/responses/response_item_list.rbi @@ -15,22 +15,7 @@ module OpenAI end # A list of items used to generate this response. - sig do - returns( - T::Array[ - T.any( - OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) - ] - ) - end + sig { returns(T::Array[OpenAI::Responses::ResponseItem::Variants]) } attr_accessor :data # The ID of the first item in the list. 
@@ -62,7 +47,15 @@ module OpenAI OpenAI::Responses::ResponseComputerToolCallOutputItem::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseFunctionToolCallItem::OrHash, - OpenAI::Responses::ResponseFunctionToolCallOutputItem::OrHash + OpenAI::Responses::ResponseFunctionToolCallOutputItem::OrHash, + OpenAI::Responses::ResponseItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseItem::LocalShellCallOutput::OrHash, + OpenAI::Responses::ResponseItem::McpListTools::OrHash, + OpenAI::Responses::ResponseItem::McpApprovalRequest::OrHash, + OpenAI::Responses::ResponseItem::McpApprovalResponse::OrHash, + OpenAI::Responses::ResponseItem::McpCall::OrHash ) ], first_id: String, @@ -88,19 +81,7 @@ module OpenAI sig do override.returns( { - data: - T::Array[ - T.any( - OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) - ], + data: T::Array[OpenAI::Responses::ResponseItem::Variants], first_id: String, has_more: T::Boolean, last_id: String, diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi new file mode 100644 index 00000000..be6f8e3e --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_arguments_delta_event.rbi @@ -0,0 +1,76 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + 
OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The partial update to the arguments for the MCP tool call. + sig { returns(T.anything) } + attr_accessor :delta + + # The unique identifier of the MCP tool call item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_call.arguments_delta'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when there is a delta (partial update) to the arguments of an MCP tool + # call. + sig do + params( + delta: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The partial update to the arguments for the MCP tool call. + delta:, + # The unique identifier of the MCP tool call item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.mcp_call.arguments_delta'. 
+ type: :"response.mcp_call.arguments_delta" + ) + end + + sig do + override.returns( + { + delta: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi new file mode 100644 index 00000000..df22b5d2 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_arguments_done_event.rbi @@ -0,0 +1,75 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent, + OpenAI::Internal::AnyHash + ) + end + + # The finalized arguments for the MCP tool call. + sig { returns(T.anything) } + attr_accessor :arguments + + # The unique identifier of the MCP tool call item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_call.arguments_done'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the arguments for an MCP tool call are finalized. + sig do + params( + arguments: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The finalized arguments for the MCP tool call. + arguments:, + # The unique identifier of the MCP tool call item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The type of the event. 
Always 'response.mcp_call.arguments_done'. + type: :"response.mcp_call.arguments_done" + ) + end + + sig do + override.returns( + { + arguments: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_completed_event.rbi b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi new file mode 100644 index 00000000..4ba445d9 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_completed_event.rbi @@ -0,0 +1,43 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallCompletedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_call.completed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an MCP tool call has completed successfully. + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.mcp_call.completed'. 
+ type: :"response.mcp_call.completed" + ) + end + + sig { override.returns({ sequence_number: Integer, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_failed_event.rbi b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi new file mode 100644 index 00000000..81d64fff --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_failed_event.rbi @@ -0,0 +1,43 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallFailedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_call.failed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an MCP tool call has failed. + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.mcp_call.failed'. 
+ type: :"response.mcp_call.failed" + ) + end + + sig { override.returns({ sequence_number: Integer, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi b/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi new file mode 100644 index 00000000..ebe6399f --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_call_in_progress_event.rbi @@ -0,0 +1,67 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpCallInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpCallInProgressEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the MCP tool call item being processed. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_call.in_progress'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an MCP tool call is in progress. + sig do + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the MCP tool call item being processed. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.mcp_call.in_progress'. 
+ type: :"response.mcp_call.in_progress" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi new file mode 100644 index 00000000..619af81d --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_list_tools_completed_event.rbi @@ -0,0 +1,43 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpListToolsCompletedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_list_tools.completed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the list of available MCP tools has been successfully retrieved. + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.mcp_list_tools.completed'. 
+ type: :"response.mcp_list_tools.completed" + ) + end + + sig { override.returns({ sequence_number: Integer, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi new file mode 100644 index 00000000..5ac00403 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_list_tools_failed_event.rbi @@ -0,0 +1,43 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpListToolsFailedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_list_tools.failed'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the attempt to list available MCP tools has failed. + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.mcp_list_tools.failed'. 
+ type: :"response.mcp_list_tools.failed" + ) + end + + sig { override.returns({ sequence_number: Integer, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi new file mode 100644 index 00000000..7bbcbda2 --- /dev/null +++ b/rbi/openai/models/responses/response_mcp_list_tools_in_progress_event.rbi @@ -0,0 +1,44 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseMcpListToolsInProgressEvent, + OpenAI::Internal::AnyHash + ) + end + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.mcp_list_tools.in_progress'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the system is in the process of retrieving the list of available + # MCP tools. + sig do + params(sequence_number: Integer, type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.mcp_list_tools.in_progress'. 
+ type: :"response.mcp_list_tools.in_progress" + ) + end + + sig { override.returns({ sequence_number: Integer, type: Symbol }) } + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_output_item.rbi b/rbi/openai/models/responses/response_output_item.rbi index 01dd560a..51542fc3 100644 --- a/rbi/openai/models/responses/response_output_item.rbi +++ b/rbi/openai/models/responses/response_output_item.rbi @@ -15,10 +15,623 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall, OpenAI::Responses::ResponseFunctionWebSearch, OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem + OpenAI::Responses::ResponseReasoningItem, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, + OpenAI::Responses::ResponseCodeInterpreterToolCall, + OpenAI::Responses::ResponseOutputItem::LocalShellCall, + OpenAI::Responses::ResponseOutputItem::McpCall, + OpenAI::Responses::ResponseOutputItem::McpListTools, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest ) end + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the image generation call. + sig { returns(String) } + attr_accessor :id + + # The generated image encoded in base64. + sig { returns(T.nilable(String)) } + attr_accessor :result + + # The status of the image generation call. + sig do + returns( + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the image generation call. Always `image_generation_call`. + sig { returns(Symbol) } + attr_accessor :type + + # An image generation request made by the model. 
+ sig do + params( + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the image generation call. + id:, + # The generated image encoded in base64. + result:, + # The status of the image generation call. + status:, + # The type of the image generation call. Always `image_generation_call`. + type: :image_generation_call + ) + end + + sig do + override.returns( + { + id: String, + result: T.nilable(String), + status: + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + # The status of the image generation call. + module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + GENERATING = + T.let( + :generating, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + FAILED = + T.let( + :failed, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::LocalShellCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the local shell call. 
+ sig { returns(String) } + attr_accessor :id + + # Execute a shell command on the server. + sig do + returns( + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action + ) + end + attr_reader :action + + sig do + params( + action: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action::OrHash + ).void + end + attr_writer :action + + # The unique ID of the local shell tool call generated by the model. + sig { returns(String) } + attr_accessor :call_id + + # The status of the local shell call. + sig do + returns( + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + end + attr_accessor :status + + # The type of the local shell call. Always `local_shell_call`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool call to run a command on the local shell. + sig do + params( + id: String, + action: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action::OrHash, + call_id: String, + status: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the local shell call. + id:, + # Execute a shell command on the server. + action:, + # The unique ID of the local shell tool call generated by the model. + call_id:, + # The status of the local shell call. + status:, + # The type of the local shell call. Always `local_shell_call`. + type: :local_shell_call + ) + end + + sig do + override.returns( + { + id: String, + action: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + call_id: String, + status: + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol, + type: Symbol + } + ) + end + def to_hash + end + + class Action < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + OpenAI::Internal::AnyHash + ) + end + + # The command to run. 
+ sig { returns(T::Array[String]) } + attr_accessor :command + + # Environment variables to set for the command. + sig { returns(T::Hash[Symbol, String]) } + attr_accessor :env + + # The type of the local shell action. Always `exec`. + sig { returns(Symbol) } + attr_accessor :type + + # Optional timeout in milliseconds for the command. + sig { returns(T.nilable(Integer)) } + attr_accessor :timeout_ms + + # Optional user to run the command as. + sig { returns(T.nilable(String)) } + attr_accessor :user + + # Optional working directory to run the command in. + sig { returns(T.nilable(String)) } + attr_accessor :working_directory + + # Execute a shell command on the server. + sig do + params( + command: T::Array[String], + env: T::Hash[Symbol, String], + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The command to run. + command:, + # Environment variables to set for the command. + env:, + # Optional timeout in milliseconds for the command. + timeout_ms: nil, + # Optional user to run the command as. + user: nil, + # Optional working directory to run the command in. + working_directory: nil, + # The type of the local shell action. Always `exec`. + type: :exec + ) + end + + sig do + override.returns( + { + command: T::Array[String], + env: T::Hash[Symbol, String], + type: Symbol, + timeout_ms: T.nilable(Integer), + user: T.nilable(String), + working_directory: T.nilable(String) + } + ) + end + def to_hash + end + end + + # The status of the local shell call. 
+ module Status + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + IN_PROGRESS = + T.let( + :in_progress, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + COMPLETED = + T.let( + :completed, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + INCOMPLETE = + T.let( + :incomplete, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::ResponseOutputItem::LocalShellCall::Status::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class McpCall < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpCall, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the tool call. + sig { returns(String) } + attr_accessor :id + + # A JSON string of the arguments passed to the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool that was run. + sig { returns(String) } + attr_accessor :name + + # The label of the MCP server running the tool. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_call`. + sig { returns(Symbol) } + attr_accessor :type + + # The error from the tool call, if any. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # The output from the tool call. + sig { returns(T.nilable(String)) } + attr_accessor :output + + # An invocation of a tool on an MCP server. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + error: T.nilable(String), + output: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the tool call. 
+ id:, + # A JSON string of the arguments passed to the tool. + arguments:, + # The name of the tool that was run. + name:, + # The label of the MCP server running the tool. + server_label:, + # The error from the tool call, if any. + error: nil, + # The output from the tool call. + output: nil, + # The type of the item. Always `mcp_call`. + type: :mcp_call + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol, + error: T.nilable(String), + output: T.nilable(String) + } + ) + end + def to_hash + end + end + + class McpListTools < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpListTools, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the list. + sig { returns(String) } + attr_accessor :id + + # The label of the MCP server. + sig { returns(String) } + attr_accessor :server_label + + # The tools available on the server. + sig do + returns( + T::Array[ + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool + ] + ) + end + attr_accessor :tools + + # The type of the item. Always `mcp_list_tools`. + sig { returns(Symbol) } + attr_accessor :type + + # Error message if the server could not list tools. + sig { returns(T.nilable(String)) } + attr_accessor :error + + # A list of tools available on an MCP server. + sig do + params( + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool::OrHash + ], + error: T.nilable(String), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the list. + id:, + # The label of the MCP server. + server_label:, + # The tools available on the server. + tools:, + # Error message if the server could not list tools. + error: nil, + # The type of the item. Always `mcp_list_tools`. 
+ type: :mcp_list_tools + ) + end + + sig do + override.returns( + { + id: String, + server_label: String, + tools: + T::Array[ + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool + ], + type: Symbol, + error: T.nilable(String) + } + ) + end + def to_hash + end + + class Tool < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpListTools::Tool, + OpenAI::Internal::AnyHash + ) + end + + # The JSON schema describing the tool's input. + sig { returns(T.anything) } + attr_accessor :input_schema + + # The name of the tool. + sig { returns(String) } + attr_accessor :name + + # Additional annotations about the tool. + sig { returns(T.nilable(T.anything)) } + attr_accessor :annotations + + # The description of the tool. + sig { returns(T.nilable(String)) } + attr_accessor :description + + # A tool available on an MCP server. + sig do + params( + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + ).returns(T.attached_class) + end + def self.new( + # The JSON schema describing the tool's input. + input_schema:, + # The name of the tool. + name:, + # Additional annotations about the tool. + annotations: nil, + # The description of the tool. + description: nil + ) + end + + sig do + override.returns( + { + input_schema: T.anything, + name: String, + annotations: T.nilable(T.anything), + description: T.nilable(String) + } + ) + end + def to_hash + end + end + end + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest, + OpenAI::Internal::AnyHash + ) + end + + # The unique ID of the approval request. + sig { returns(String) } + attr_accessor :id + + # A JSON string of arguments for the tool. + sig { returns(String) } + attr_accessor :arguments + + # The name of the tool to run. 
+ sig { returns(String) } + attr_accessor :name + + # The label of the MCP server making the request. + sig { returns(String) } + attr_accessor :server_label + + # The type of the item. Always `mcp_approval_request`. + sig { returns(Symbol) } + attr_accessor :type + + # A request for human approval of a tool invocation. + sig do + params( + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique ID of the approval request. + id:, + # A JSON string of arguments for the tool. + arguments:, + # The name of the tool to run. + name:, + # The label of the MCP server making the request. + server_label:, + # The type of the item. Always `mcp_approval_request`. + type: :mcp_approval_request + ) + end + + sig do + override.returns( + { + id: String, + arguments: String, + name: String, + server_label: String, + type: Symbol + } + ) + end + def to_hash + end + end + sig do override.returns( T::Array[OpenAI::Responses::ResponseOutputItem::Variants] diff --git a/rbi/openai/models/responses/response_output_item_added_event.rbi b/rbi/openai/models/responses/response_output_item_added_event.rbi index 8f37ebb9..6479c80d 100644 --- a/rbi/openai/models/responses/response_output_item_added_event.rbi +++ b/rbi/openai/models/responses/response_output_item_added_event.rbi @@ -13,24 +13,17 @@ module OpenAI end # The output item that was added. - sig do - returns( - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ) - end + sig { returns(OpenAI::Responses::ResponseOutputItem::Variants) } attr_accessor :item # The index of the output item that was added. sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. 
+ sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.output_item.added`. sig { returns(Symbol) } attr_accessor :type @@ -45,9 +38,16 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ), output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -56,6 +56,8 @@ module OpenAI item:, # The index of the output item that was added. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.output_item.added`. 
type: :"response.output_item.added" ) @@ -64,16 +66,9 @@ module OpenAI sig do override.returns( { - item: - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ), + item: OpenAI::Responses::ResponseOutputItem::Variants, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_output_item_done_event.rbi b/rbi/openai/models/responses/response_output_item_done_event.rbi index ff21f27a..7789f951 100644 --- a/rbi/openai/models/responses/response_output_item_done_event.rbi +++ b/rbi/openai/models/responses/response_output_item_done_event.rbi @@ -13,24 +13,17 @@ module OpenAI end # The output item that was marked done. - sig do - returns( - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ) - ) - end + sig { returns(OpenAI::Responses::ResponseOutputItem::Variants) } attr_accessor :item # The index of the output item that was marked done. sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.output_item.done`. 
sig { returns(Symbol) } attr_accessor :type @@ -45,9 +38,16 @@ module OpenAI OpenAI::Responses::ResponseFunctionToolCall::OrHash, OpenAI::Responses::ResponseFunctionWebSearch::OrHash, OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash + OpenAI::Responses::ResponseReasoningItem::OrHash, + OpenAI::Responses::ResponseOutputItem::ImageGenerationCall::OrHash, + OpenAI::Responses::ResponseCodeInterpreterToolCall::OrHash, + OpenAI::Responses::ResponseOutputItem::LocalShellCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpCall::OrHash, + OpenAI::Responses::ResponseOutputItem::McpListTools::OrHash, + OpenAI::Responses::ResponseOutputItem::McpApprovalRequest::OrHash ), output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -56,6 +56,8 @@ module OpenAI item:, # The index of the output item that was marked done. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.output_item.done`. 
type: :"response.output_item.done" ) @@ -64,16 +66,9 @@ module OpenAI sig do override.returns( { - item: - T.any( - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseFunctionToolCall, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseReasoningItem - ), + item: OpenAI::Responses::ResponseOutputItem::Variants, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi new file mode 100644 index 00000000..422e56dc --- /dev/null +++ b/rbi/openai/models/responses/response_output_text_annotation_added_event.rbi @@ -0,0 +1,91 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The annotation object being added. (See annotation schema for details.) + sig { returns(T.anything) } + attr_accessor :annotation + + # The index of the annotation within the content part. + sig { returns(Integer) } + attr_accessor :annotation_index + + # The index of the content part within the output item. + sig { returns(Integer) } + attr_accessor :content_index + + # The unique identifier of the item to which the annotation is being added. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.output_text_annotation.added'. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Emitted when an annotation is added to output text content. + sig do + params( + annotation: T.anything, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The annotation object being added. (See annotation schema for details.) + annotation:, + # The index of the annotation within the content part. + annotation_index:, + # The index of the content part within the output item. + content_index:, + # The unique identifier of the item to which the annotation is being added. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.output_text_annotation.added'. + type: :"response.output_text_annotation.added" + ) + end + + sig do + override.returns( + { + annotation: T.anything, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_queued_event.rbi b/rbi/openai/models/responses/response_queued_event.rbi new file mode 100644 index 00000000..015a597c --- /dev/null +++ b/rbi/openai/models/responses/response_queued_event.rbi @@ -0,0 +1,62 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseQueuedEvent, + OpenAI::Internal::AnyHash + ) + end + + # The full response object that is queued. + sig { returns(OpenAI::Responses::Response) } + attr_reader :response + + sig { params(response: OpenAI::Responses::Response::OrHash).void } + attr_writer :response + + # The sequence number for this event. 
+ sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.queued'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when a response is queued and waiting to be processed. + sig do + params( + response: OpenAI::Responses::Response::OrHash, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The full response object that is queued. + response:, + # The sequence number for this event. + sequence_number:, + # The type of the event. Always 'response.queued'. + type: :"response.queued" + ) + end + + sig do + override.returns( + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_delta_event.rbi new file mode 100644 index 00000000..37fc9e63 --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_delta_event.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The index of the reasoning content part within the output item. + sig { returns(Integer) } + attr_accessor :content_index + + # The partial update to the reasoning content. + sig { returns(T.anything) } + attr_accessor :delta + + # The unique identifier of the item for which reasoning is being updated. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The type of the event. Always 'response.reasoning.delta'. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Emitted when there is a delta (partial update) to the reasoning content. + sig do + params( + content_index: Integer, + delta: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The index of the reasoning content part within the output item. + content_index:, + # The partial update to the reasoning content. + delta:, + # The unique identifier of the item for which reasoning is being updated. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The type of the event. Always 'response.reasoning.delta'. + type: :"response.reasoning.delta" + ) + end + + sig do + override.returns( + { + content_index: Integer, + delta: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_done_event.rbi b/rbi/openai/models/responses/response_reasoning_done_event.rbi new file mode 100644 index 00000000..05c8b893 --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_done_event.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningDoneEvent, + OpenAI::Internal::AnyHash + ) + end + + # The index of the reasoning content part within the output item. + sig { returns(Integer) } + attr_accessor :content_index + + # The unique identifier of the item for which reasoning is finalized. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. 
+ sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The finalized reasoning text. + sig { returns(String) } + attr_accessor :text + + # The type of the event. Always 'response.reasoning.done'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the reasoning content is finalized for an item. + sig do + params( + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + text: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The index of the reasoning content part within the output item. + content_index:, + # The unique identifier of the item for which reasoning is finalized. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The finalized reasoning text. + text:, + # The type of the event. Always 'response.reasoning.done'. + type: :"response.reasoning.done" + ) + end + + sig do + override.returns( + { + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi new file mode 100644 index 00000000..c92fd014 --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_summary_delta_event.rbi @@ -0,0 +1,85 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDeltaEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, + OpenAI::Internal::AnyHash + ) + end + + # The partial update to the reasoning summary content. 
+ sig { returns(T.anything) } + attr_accessor :delta + + # The unique identifier of the item for which the reasoning summary is being + # updated. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The index of the summary part within the output item. + sig { returns(Integer) } + attr_accessor :summary_index + + # The type of the event. Always 'response.reasoning_summary.delta'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when there is a delta (partial update) to the reasoning summary content. + sig do + params( + delta: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The partial update to the reasoning summary content. + delta:, + # The unique identifier of the item for which the reasoning summary is being + # updated. + item_id:, + # The index of the output item in the response's output array. + output_index:, + # The sequence number of this event. + sequence_number:, + # The index of the summary part within the output item. + summary_index:, + # The type of the event. Always 'response.reasoning_summary.delta'. 
+ type: :"response.reasoning_summary.delta" + ) + end + + sig do + override.returns( + { + delta: T.anything, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi new file mode 100644 index 00000000..eead2395 --- /dev/null +++ b/rbi/openai/models/responses/response_reasoning_summary_done_event.rbi @@ -0,0 +1,83 @@ +# typed: strong + +module OpenAI + module Models + module Responses + class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::ResponseReasoningSummaryDoneEvent, + OpenAI::Internal::AnyHash + ) + end + + # The unique identifier of the item for which the reasoning summary is finalized. + sig { returns(String) } + attr_accessor :item_id + + # The index of the output item in the response's output array. + sig { returns(Integer) } + attr_accessor :output_index + + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + + # The index of the summary part within the output item. + sig { returns(Integer) } + attr_accessor :summary_index + + # The finalized reasoning summary text. + sig { returns(String) } + attr_accessor :text + + # The type of the event. Always 'response.reasoning_summary.done'. + sig { returns(Symbol) } + attr_accessor :type + + # Emitted when the reasoning summary content is finalized for an item. + sig do + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The unique identifier of the item for which the reasoning summary is finalized. + item_id:, + # The index of the output item in the response's output array. 
+ output_index:, + # The sequence number of this event. + sequence_number:, + # The index of the summary part within the output item. + summary_index:, + # The finalized reasoning summary text. + text:, + # The type of the event. Always 'response.reasoning_summary.done'. + type: :"response.reasoning_summary.done" + ) + end + + sig do + override.returns( + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + type: Symbol + } + ) + end + def to_hash + end + end + end + end +end diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi index 7e0103dc..400eaae7 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_added_event.rbi @@ -36,6 +36,10 @@ module OpenAI end attr_writer :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -51,6 +55,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part::OrHash, + sequence_number: Integer, summary_index: Integer, type: Symbol ).returns(T.attached_class) @@ -62,6 +67,8 @@ module OpenAI output_index:, # The summary part that was added. part:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The type of the event. Always `response.reasoning_summary_part.added`. 
@@ -76,6 +83,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, summary_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi index ecf52172..40caaff1 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_part_done_event.rbi @@ -36,6 +36,10 @@ module OpenAI end attr_writer :part + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -51,6 +55,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part::OrHash, + sequence_number: Integer, summary_index: Integer, type: Symbol ).returns(T.attached_class) @@ -62,6 +67,8 @@ module OpenAI output_index:, # The completed summary part. part:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The type of the event. Always `response.reasoning_summary_part.done`. 
@@ -76,6 +83,7 @@ module OpenAI output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, summary_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi index 0367f592..f4af0148 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_delta_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -38,6 +42,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: Symbol ).returns(T.attached_class) @@ -49,6 +54,8 @@ module OpenAI item_id:, # The index of the output item this summary text delta is associated with. output_index:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The type of the event. Always `response.reasoning_summary_text.delta`. 
@@ -62,6 +69,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: Symbol } diff --git a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi index 89590e6b..95ab837f 100644 --- a/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi +++ b/rbi/openai/models/responses/response_reasoning_summary_text_done_event.rbi @@ -20,6 +20,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The index of the summary part within the reasoning summary. sig { returns(Integer) } attr_accessor :summary_index @@ -37,6 +41,7 @@ module OpenAI params( item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: Symbol @@ -47,6 +52,8 @@ module OpenAI item_id:, # The index of the output item this summary text is associated with. output_index:, + # The sequence number of this event. + sequence_number:, # The index of the summary part within the reasoning summary. summary_index:, # The full text of the completed reasoning summary. @@ -61,6 +68,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: Symbol diff --git a/rbi/openai/models/responses/response_refusal_delta_event.rbi b/rbi/openai/models/responses/response_refusal_delta_event.rbi index cde0e1ec..34271558 100644 --- a/rbi/openai/models/responses/response_refusal_delta_event.rbi +++ b/rbi/openai/models/responses/response_refusal_delta_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.refusal.delta`. 
sig { returns(Symbol) } attr_accessor :type @@ -39,6 +43,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI item_id:, # The index of the output item that the refusal text is added to. output_index:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.refusal.delta`. type: :"response.refusal.delta" ) @@ -63,6 +70,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_refusal_done_event.rbi b/rbi/openai/models/responses/response_refusal_done_event.rbi index db039d4f..3f7a62e8 100644 --- a/rbi/openai/models/responses/response_refusal_done_event.rbi +++ b/rbi/openai/models/responses/response_refusal_done_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(String) } attr_accessor :refusal + # The sequence number of this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.refusal.done`. sig { returns(Symbol) } attr_accessor :type @@ -39,6 +43,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI output_index:, # The refusal text that is finalized. refusal:, + # The sequence number of this event. + sequence_number:, # The type of the event. Always `response.refusal.done`. 
type: :"response.refusal.done" ) @@ -63,6 +70,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_status.rbi b/rbi/openai/models/responses/response_status.rbi index ed8a5a5d..5eb9802f 100644 --- a/rbi/openai/models/responses/response_status.rbi +++ b/rbi/openai/models/responses/response_status.rbi @@ -4,7 +4,7 @@ module OpenAI module Models module Responses # The status of the response generation. One of `completed`, `failed`, - # `in_progress`, or `incomplete`. + # `in_progress`, `cancelled`, `queued`, or `incomplete`. module ResponseStatus extend OpenAI::Internal::Type::Enum @@ -17,6 +17,9 @@ module OpenAI FAILED = T.let(:failed, OpenAI::Responses::ResponseStatus::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Responses::ResponseStatus::TaggedSymbol) + CANCELLED = + T.let(:cancelled, OpenAI::Responses::ResponseStatus::TaggedSymbol) + QUEUED = T.let(:queued, OpenAI::Responses::ResponseStatus::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Responses::ResponseStatus::TaggedSymbol) diff --git a/rbi/openai/models/responses/response_stream_event.rbi b/rbi/openai/models/responses/response_stream_event.rbi index de4fe138..1afd018a 100644 --- a/rbi/openai/models/responses/response_stream_event.rbi +++ b/rbi/openai/models/responses/response_stream_event.rbi @@ -40,12 +40,29 @@ module OpenAI OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, OpenAI::Responses::ResponseRefusalDeltaEvent, OpenAI::Responses::ResponseRefusalDoneEvent, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Responses::ResponseTextDeltaEvent, OpenAI::Responses::ResponseTextDoneEvent, OpenAI::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Responses::ResponseWebSearchCallSearchingEvent + OpenAI::Responses::ResponseWebSearchCallSearchingEvent, + 
OpenAI::Responses::ResponseImageGenCallCompletedEvent, + OpenAI::Responses::ResponseImageGenCallGeneratingEvent, + OpenAI::Responses::ResponseImageGenCallInProgressEvent, + OpenAI::Responses::ResponseImageGenCallPartialImageEvent, + OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent, + OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent, + OpenAI::Responses::ResponseMcpCallCompletedEvent, + OpenAI::Responses::ResponseMcpCallFailedEvent, + OpenAI::Responses::ResponseMcpCallInProgressEvent, + OpenAI::Responses::ResponseMcpListToolsCompletedEvent, + OpenAI::Responses::ResponseMcpListToolsFailedEvent, + OpenAI::Responses::ResponseMcpListToolsInProgressEvent, + OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent, + OpenAI::Responses::ResponseQueuedEvent, + OpenAI::Responses::ResponseReasoningDeltaEvent, + OpenAI::Responses::ResponseReasoningDoneEvent, + OpenAI::Responses::ResponseReasoningSummaryDeltaEvent, + OpenAI::Responses::ResponseReasoningSummaryDoneEvent ) end diff --git a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi b/rbi/openai/models/responses/response_text_annotation_delta_event.rbi deleted file mode 100644 index d42d93ce..00000000 --- a/rbi/openai/models/responses/response_text_annotation_delta_event.rbi +++ /dev/null @@ -1,285 +0,0 @@ -# typed: strong - -module OpenAI - module Models - module Responses - class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Internal::AnyHash - ) - end - - # A citation to a file. - sig do - returns( - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - ) - end - attr_accessor :annotation - - # The index of the annotation that was added. 
- sig { returns(Integer) } - attr_accessor :annotation_index - - # The index of the content part that the text annotation was added to. - sig { returns(Integer) } - attr_accessor :content_index - - # The ID of the output item that the text annotation was added to. - sig { returns(String) } - attr_accessor :item_id - - # The index of the output item that the text annotation was added to. - sig { returns(Integer) } - attr_accessor :output_index - - # The type of the event. Always `response.output_text.annotation.added`. - sig { returns(Symbol) } - attr_accessor :type - - # Emitted when a text annotation is added. - sig do - params( - annotation: - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation::OrHash, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation::OrHash, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath::OrHash - ), - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # A citation to a file. - annotation:, - # The index of the annotation that was added. - annotation_index:, - # The index of the content part that the text annotation was added to. - content_index:, - # The ID of the output item that the text annotation was added to. - item_id:, - # The index of the output item that the text annotation was added to. - output_index:, - # The type of the event. Always `response.output_text.annotation.added`. 
- type: :"response.output_text.annotation.added" - ) - end - - sig do - override.returns( - { - annotation: - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ), - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - type: Symbol - } - ) - end - def to_hash - end - - # A citation to a file. - module Annotation - extend OpenAI::Internal::Type::Union - - Variants = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - ) - end - - class FileCitation < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, - OpenAI::Internal::AnyHash - ) - end - - # The ID of the file. - sig { returns(String) } - attr_accessor :file_id - - # The index of the file in the list of files. - sig { returns(Integer) } - attr_accessor :index - - # The type of the file citation. Always `file_citation`. - sig { returns(Symbol) } - attr_accessor :type - - # A citation to a file. - sig do - params(file_id: String, index: Integer, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The ID of the file. - file_id:, - # The index of the file in the list of files. - index:, - # The type of the file citation. Always `file_citation`. 
- type: :file_citation - ) - end - - sig do - override.returns( - { file_id: String, index: Integer, type: Symbol } - ) - end - def to_hash - end - end - - class URLCitation < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, - OpenAI::Internal::AnyHash - ) - end - - # The index of the last character of the URL citation in the message. - sig { returns(Integer) } - attr_accessor :end_index - - # The index of the first character of the URL citation in the message. - sig { returns(Integer) } - attr_accessor :start_index - - # The title of the web resource. - sig { returns(String) } - attr_accessor :title - - # The type of the URL citation. Always `url_citation`. - sig { returns(Symbol) } - attr_accessor :type - - # The URL of the web resource. - sig { returns(String) } - attr_accessor :url - - # A citation for a web resource used to generate a model response. - sig do - params( - end_index: Integer, - start_index: Integer, - title: String, - url: String, - type: Symbol - ).returns(T.attached_class) - end - def self.new( - # The index of the last character of the URL citation in the message. - end_index:, - # The index of the first character of the URL citation in the message. - start_index:, - # The title of the web resource. - title:, - # The URL of the web resource. - url:, - # The type of the URL citation. Always `url_citation`. - type: :url_citation - ) - end - - sig do - override.returns( - { - end_index: Integer, - start_index: Integer, - title: String, - type: Symbol, - url: String - } - ) - end - def to_hash - end - end - - class FilePath < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any( - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath, - OpenAI::Internal::AnyHash - ) - end - - # The ID of the file. - sig { returns(String) } - attr_accessor :file_id - - # The index of the file in the list of files. 
- sig { returns(Integer) } - attr_accessor :index - - # The type of the file path. Always `file_path`. - sig { returns(Symbol) } - attr_accessor :type - - # A path to a file. - sig do - params(file_id: String, index: Integer, type: Symbol).returns( - T.attached_class - ) - end - def self.new( - # The ID of the file. - file_id:, - # The index of the file in the list of files. - index:, - # The type of the file path. Always `file_path`. - type: :file_path - ) - end - - sig do - override.returns( - { file_id: String, index: Integer, type: Symbol } - ) - end - def to_hash - end - end - - sig do - override.returns( - T::Array[ - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::Variants - ] - ) - end - def self.variants - end - end - end - end - end -end diff --git a/rbi/openai/models/responses/response_text_delta_event.rbi b/rbi/openai/models/responses/response_text_delta_event.rbi index 661b88d1..ba989ab5 100644 --- a/rbi/openai/models/responses/response_text_delta_event.rbi +++ b/rbi/openai/models/responses/response_text_delta_event.rbi @@ -28,6 +28,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.output_text.delta`. sig { returns(Symbol) } attr_accessor :type @@ -39,6 +43,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol ).returns(T.attached_class) end @@ -51,6 +56,8 @@ module OpenAI item_id:, # The index of the output item that the text delta was added to. output_index:, + # The sequence number for this event. + sequence_number:, # The type of the event. Always `response.output_text.delta`. 
type: :"response.output_text.delta" ) @@ -63,6 +70,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: Symbol } ) diff --git a/rbi/openai/models/responses/response_text_done_event.rbi b/rbi/openai/models/responses/response_text_done_event.rbi index 2b142d10..aa712038 100644 --- a/rbi/openai/models/responses/response_text_done_event.rbi +++ b/rbi/openai/models/responses/response_text_done_event.rbi @@ -24,6 +24,10 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number for this event. + sig { returns(Integer) } + attr_accessor :sequence_number + # The text content that is finalized. sig { returns(String) } attr_accessor :text @@ -38,6 +42,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: Symbol ).returns(T.attached_class) @@ -49,6 +54,8 @@ module OpenAI item_id:, # The index of the output item that the text content is finalized. output_index:, + # The sequence number for this event. + sequence_number:, # The text content that is finalized. text:, # The type of the event. Always `response.output_text.done`. @@ -62,6 +69,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: Symbol } diff --git a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi index f090f369..78f9a4e9 100644 --- a/rbi/openai/models/responses/response_web_search_call_completed_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_completed_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of the web search call being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.web_search_call.completed`. 
sig { returns(Symbol) } attr_accessor :type # Emitted when a web search call is completed. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # Unique ID for the output item associated with the web search call. item_id:, # The index of the output item that the web search call is associated with. output_index:, + # The sequence number of the web search call being processed. + sequence_number:, # The type of the event. Always `response.web_search_call.completed`. type: :"response.web_search_call.completed" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi index c8fdaf5a..8fc0415b 100644 --- a/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_in_progress_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of the web search call being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.web_search_call.in_progress`. sig { returns(Symbol) } attr_accessor :type # Emitted when a web search call is initiated. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # Unique ID for the output item associated with the web search call. 
item_id:, # The index of the output item that the web search call is associated with. output_index:, + # The sequence number of the web search call being processed. + sequence_number:, # The type of the event. Always `response.web_search_call.in_progress`. type: :"response.web_search_call.in_progress" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi index 5f0b5d33..15ce4ac3 100644 --- a/rbi/openai/models/responses/response_web_search_call_searching_event.rbi +++ b/rbi/openai/models/responses/response_web_search_call_searching_event.rbi @@ -20,21 +20,30 @@ module OpenAI sig { returns(Integer) } attr_accessor :output_index + # The sequence number of the web search call being processed. + sig { returns(Integer) } + attr_accessor :sequence_number + # The type of the event. Always `response.web_search_call.searching`. sig { returns(Symbol) } attr_accessor :type # Emitted when a web search call is executing. sig do - params(item_id: String, output_index: Integer, type: Symbol).returns( - T.attached_class - ) + params( + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + ).returns(T.attached_class) end def self.new( # Unique ID for the output item associated with the web search call. item_id:, # The index of the output item that the web search call is associated with. output_index:, + # The sequence number of the web search call being processed. + sequence_number:, # The type of the event. Always `response.web_search_call.searching`. 
type: :"response.web_search_call.searching" ) @@ -42,7 +51,12 @@ module OpenAI sig do override.returns( - { item_id: String, output_index: Integer, type: Symbol } + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: Symbol + } ) end def to_hash diff --git a/rbi/openai/models/responses/tool.rbi b/rbi/openai/models/responses/tool.rbi index f9eb832e..0b7fbd91 100644 --- a/rbi/openai/models/responses/tool.rbi +++ b/rbi/openai/models/responses/tool.rbi @@ -10,13 +10,1050 @@ module OpenAI Variants = T.type_alias do T.any( - OpenAI::Responses::FileSearchTool, OpenAI::Responses::FunctionTool, + OpenAI::Responses::FileSearchTool, OpenAI::Responses::ComputerTool, + OpenAI::Responses::Tool::Mcp, + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Responses::Tool::LocalShell, OpenAI::Responses::WebSearchTool ) end + class Mcp < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::Responses::Tool::Mcp, OpenAI::Internal::AnyHash) + end + + # A label for this MCP server, used to identify it in tool calls. + sig { returns(String) } + attr_accessor :server_label + + # The URL for the MCP server. + sig { returns(String) } + attr_accessor :server_url + + # The type of the MCP tool. Always `mcp`. + sig { returns(Symbol) } + attr_accessor :type + + # List of allowed tool names or a filter object. + sig do + returns( + T.nilable( + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + ) + ) + ) + end + attr_accessor :allowed_tools + + # Optional HTTP headers to send to the MCP server. Use for authentication or other + # purposes. + sig { returns(T.nilable(T::Hash[Symbol, String])) } + attr_accessor :headers + + # Specify which of the MCP server's tools require approval. 
+ sig do + returns( + T.nilable( + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol + ) + ) + ) + end + attr_accessor :require_approval + + # Give the model access to additional tools via remote Model Context Protocol + # (MCP) servers. + # [Learn more about MCP](https://platform.openai.com/docs/guides/tools-remote-mcp). + sig do + params( + server_label: String, + server_url: String, + allowed_tools: + T.nilable( + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter::OrHash + ) + ), + headers: T.nilable(T::Hash[Symbol, String]), + require_approval: + T.nilable( + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::OrHash, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol + ) + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # A label for this MCP server, used to identify it in tool calls. + server_label:, + # The URL for the MCP server. + server_url:, + # List of allowed tool names or a filter object. + allowed_tools: nil, + # Optional HTTP headers to send to the MCP server. Use for authentication or other + # purposes. + headers: nil, + # Specify which of the MCP server's tools require approval. + require_approval: nil, + # The type of the MCP tool. Always `mcp`. 
+ type: :mcp + ) + end + + sig do + override.returns( + { + server_label: String, + server_url: String, + type: Symbol, + allowed_tools: + T.nilable( + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + ) + ), + headers: T.nilable(T::Hash[Symbol, String]), + require_approval: + T.nilable( + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::OrSymbol + ) + ) + } + ) + end + def to_hash + end + + # List of allowed tool names or a filter object. + module AllowedTools + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + T::Array[String], + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + ) + end + + class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter, + OpenAI::Internal::AnyHash + ) + end + + # List of allowed tool names. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :tool_names + + sig { params(tool_names: T::Array[String]).void } + attr_writer :tool_names + + # A filter object to specify which tools are allowed. + sig do + params(tool_names: T::Array[String]).returns(T.attached_class) + end + def self.new( + # List of allowed tool names. + tool_names: nil + ) + end + + sig { override.returns({ tool_names: T::Array[String] }) } + def to_hash + end + end + + sig do + override.returns( + T::Array[OpenAI::Responses::Tool::Mcp::AllowedTools::Variants] + ) + end + def self.variants + end + + StringArray = + T.let( + OpenAI::Internal::Type::ArrayOf[String], + OpenAI::Internal::Type::Converter + ) + end + + # Specify which of the MCP server's tools require approval. 
+ module RequireApproval + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ) + end + + class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter, + OpenAI::Internal::AnyHash + ) + end + + # A list of tools that always require approval. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always + ) + ) + end + attr_reader :always + + sig do + params( + always: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always::OrHash + ).void + end + attr_writer :always + + # A list of tools that never require approval. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + ) + ) + end + attr_reader :never + + sig do + params( + never: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never::OrHash + ).void + end + attr_writer :never + + sig do + params( + always: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always::OrHash, + never: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never::OrHash + ).returns(T.attached_class) + end + def self.new( + # A list of tools that always require approval. + always: nil, + # A list of tools that never require approval. 
+ never: nil + ) + end + + sig do + override.returns( + { + always: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + never: + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + } + ) + end + def to_hash + end + + class Always < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + OpenAI::Internal::AnyHash + ) + end + + # List of tools that require approval. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :tool_names + + sig { params(tool_names: T::Array[String]).void } + attr_writer :tool_names + + # A list of tools that always require approval. + sig do + params(tool_names: T::Array[String]).returns(T.attached_class) + end + def self.new( + # List of tools that require approval. + tool_names: nil + ) + end + + sig { override.returns({ tool_names: T::Array[String] }) } + def to_hash + end + end + + class Never < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never, + OpenAI::Internal::AnyHash + ) + end + + # List of tools that do not require approval. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :tool_names + + sig { params(tool_names: T::Array[String]).void } + attr_writer :tool_names + + # A list of tools that never require approval. + sig do + params(tool_names: T::Array[String]).returns(T.attached_class) + end + def self.new( + # List of tools that do not require approval. + tool_names: nil + ) + end + + sig { override.returns({ tool_names: T::Array[String] }) } + def to_hash + end + end + end + + # Specify a single approval policy for all tools. One of `always` or `never`. When + # set to `always`, all tools will require approval. When set to `never`, all tools + # will not require approval. 
+ module McpToolApprovalSetting + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + ALWAYS = + T.let( + :always, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ) + NEVER = + T.let( + :never, + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalSetting::TaggedSymbol + ] + ) + end + def self.values + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::Mcp::RequireApproval::Variants + ] + ) + end + def self.variants + end + end + end + + class CodeInterpreter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::CodeInterpreter, + OpenAI::Internal::AnyHash + ) + end + + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + sig do + returns( + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + ) + ) + end + attr_accessor :container + + # The type of the code interpreter tool. Always `code_interpreter`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool that runs Python code to help generate a response to a prompt. + sig do + params( + container: + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto::OrHash + ), + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + container:, + # The type of the code interpreter tool. Always `code_interpreter`. 
+ type: :code_interpreter + ) + end + + sig do + override.returns( + { + container: + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + ), + type: Symbol + } + ) + end + def to_hash + end + + # The code interpreter container. Can be a container ID or an object that + # specifies uploaded file IDs to make available to your code. + module Container + extend OpenAI::Internal::Type::Union + + Variants = + T.type_alias do + T.any( + String, + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + ) + end + + class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto, + OpenAI::Internal::AnyHash + ) + end + + # Always `auto`. + sig { returns(Symbol) } + attr_accessor :type + + # An optional list of uploaded files to make available to your code. + sig { returns(T.nilable(T::Array[String])) } + attr_reader :file_ids + + sig { params(file_ids: T::Array[String]).void } + attr_writer :file_ids + + # Configuration for a code interpreter container. Optionally specify the IDs of + # the files to run the code on. + sig do + params(file_ids: T::Array[String], type: Symbol).returns( + T.attached_class + ) + end + def self.new( + # An optional list of uploaded files to make available to your code. + file_ids: nil, + # Always `auto`. + type: :auto + ) + end + + sig do + override.returns({ type: Symbol, file_ids: T::Array[String] }) + end + def to_hash + end + end + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::CodeInterpreter::Container::Variants + ] + ) + end + def self.variants + end + end + end + + class ImageGeneration < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::ImageGeneration, + OpenAI::Internal::AnyHash + ) + end + + # The type of the image generation tool. Always `image_generation`. 
+ sig { returns(Symbol) } + attr_accessor :type + + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol + ) + ) + end + attr_reader :background + + sig do + params( + background: + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol + ).void + end + attr_writer :background + + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::InputImageMask + ) + ) + end + attr_reader :input_image_mask + + sig do + params( + input_image_mask: + OpenAI::Responses::Tool::ImageGeneration::InputImageMask::OrHash + ).void + end + attr_writer :input_image_mask + + # The image generation model to use. Default: `gpt-image-1`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol + ) + ) + end + attr_reader :model + + sig do + params( + model: OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol + ).void + end + attr_writer :model + + # Moderation level for the generated image. Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol + ) + ) + end + attr_reader :moderation + + sig do + params( + moderation: + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol + ).void + end + attr_writer :moderation + + # Compression level for the output image. Default: 100. + sig { returns(T.nilable(Integer)) } + attr_reader :output_compression + + sig { params(output_compression: Integer).void } + attr_writer :output_compression + + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. 
+ sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol + ) + ) + end + attr_reader :output_format + + sig do + params( + output_format: + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol + ).void + end + attr_writer :output_format + + # Number of partial images to generate in streaming mode, from 0 (default value) + # to 3. + sig { returns(T.nilable(Integer)) } + attr_reader :partial_images + + sig { params(partial_images: Integer).void } + attr_writer :partial_images + + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol + ) + ) + end + attr_reader :quality + + sig do + params( + quality: + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol + ).void + end + attr_writer :quality + + # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. + sig do + returns( + T.nilable( + OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol + ) + ) + end + attr_reader :size + + sig do + params( + size: OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol + ).void + end + attr_writer :size + + # A tool that generates images using a model like `gpt-image-1`. 
+ sig do + params( + background: + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol, + input_image_mask: + OpenAI::Responses::Tool::ImageGeneration::InputImageMask::OrHash, + model: OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol, + moderation: + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol, + output_compression: Integer, + output_format: + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol, + partial_images: Integer, + quality: + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol, + size: OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol, + type: Symbol + ).returns(T.attached_class) + end + def self.new( + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. + background: nil, + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + input_image_mask: nil, + # The image generation model to use. Default: `gpt-image-1`. + model: nil, + # Moderation level for the generated image. Default: `auto`. + moderation: nil, + # Compression level for the output image. Default: 100. + output_compression: nil, + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. + output_format: nil, + # Number of partial images to generate in streaming mode, from 0 (default value) + # to 3. + partial_images: nil, + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. + quality: nil, + # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. + size: nil, + # The type of the image generation tool. Always `image_generation`. 
+ type: :image_generation + ) + end + + sig do + override.returns( + { + type: Symbol, + background: + OpenAI::Responses::Tool::ImageGeneration::Background::OrSymbol, + input_image_mask: + OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + model: + OpenAI::Responses::Tool::ImageGeneration::Model::OrSymbol, + moderation: + OpenAI::Responses::Tool::ImageGeneration::Moderation::OrSymbol, + output_compression: Integer, + output_format: + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::OrSymbol, + partial_images: Integer, + quality: + OpenAI::Responses::Tool::ImageGeneration::Quality::OrSymbol, + size: OpenAI::Responses::Tool::ImageGeneration::Size::OrSymbol + } + ) + end + def to_hash + end + + # Background type for the generated image. One of `transparent`, `opaque`, or + # `auto`. Default: `auto`. + module Background + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::ImageGeneration::Background + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + TRANSPARENT = + T.let( + :transparent, + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ) + OPAQUE = + T.let( + :opaque, + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Background::TaggedSymbol + ] + ) + end + def self.values + end + end + + class InputImageMask < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + OpenAI::Internal::AnyHash + ) + end + + # File ID for the mask image. + sig { returns(T.nilable(String)) } + attr_reader :file_id + + sig { params(file_id: String).void } + attr_writer :file_id + + # Base64-encoded mask image. 
+ sig { returns(T.nilable(String)) } + attr_reader :image_url + + sig { params(image_url: String).void } + attr_writer :image_url + + # Optional mask for inpainting. Contains `image_url` (string, optional) and + # `file_id` (string, optional). + sig do + params(file_id: String, image_url: String).returns( + T.attached_class + ) + end + def self.new( + # File ID for the mask image. + file_id: nil, + # Base64-encoded mask image. + image_url: nil + ) + end + + sig { override.returns({ file_id: String, image_url: String }) } + def to_hash + end + end + + # The image generation model to use. Default: `gpt-image-1`. + module Model + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::ImageGeneration::Model) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + GPT_IMAGE_1 = + T.let( + :"gpt-image-1", + OpenAI::Responses::Tool::ImageGeneration::Model::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Model::TaggedSymbol + ] + ) + end + def self.values + end + end + + # Moderation level for the generated image. Default: `auto`. + module Moderation + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::ImageGeneration::Moderation + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Moderation::TaggedSymbol + ) + LOW = + T.let( + :low, + OpenAI::Responses::Tool::ImageGeneration::Moderation::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Moderation::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The output format of the generated image. One of `png`, `webp`, or `jpeg`. + # Default: `png`. 
+ module OutputFormat + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all( + Symbol, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat + ) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + PNG = + T.let( + :png, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ) + WEBP = + T.let( + :webp, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ) + JPEG = + T.let( + :jpeg, + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::OutputFormat::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The quality of the generated image. One of `low`, `medium`, `high`, or `auto`. + # Default: `auto`. + module Quality + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::ImageGeneration::Quality) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + LOW = + T.let( + :low, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + MEDIUM = + T.let( + :medium, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + HIGH = + T.let( + :high, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Quality::TaggedSymbol + ] + ) + end + def self.values + end + end + + # The size of the generated image. One of `1024x1024`, `1024x1536`, `1536x1024`, + # or `auto`. Default: `auto`. 
+ module Size + extend OpenAI::Internal::Type::Enum + + TaggedSymbol = + T.type_alias do + T.all(Symbol, OpenAI::Responses::Tool::ImageGeneration::Size) + end + OrSymbol = T.type_alias { T.any(Symbol, String) } + + SIZE_1024X1024 = + T.let( + :"1024x1024", + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + SIZE_1024X1536 = + T.let( + :"1024x1536", + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + SIZE_1536X1024 = + T.let( + :"1536x1024", + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + AUTO = + T.let( + :auto, + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ) + + sig do + override.returns( + T::Array[ + OpenAI::Responses::Tool::ImageGeneration::Size::TaggedSymbol + ] + ) + end + def self.values + end + end + end + + class LocalShell < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::Responses::Tool::LocalShell, + OpenAI::Internal::AnyHash + ) + end + + # The type of the local shell tool. Always `local_shell`. + sig { returns(Symbol) } + attr_accessor :type + + # A tool that allows the model to execute shell commands in a local environment. + sig { params(type: Symbol).returns(T.attached_class) } + def self.new( + # The type of the local shell tool. Always `local_shell`. 
+ type: :local_shell + ) + end + + sig { override.returns({ type: Symbol }) } + def to_hash + end + end + sig { override.returns(T::Array[OpenAI::Responses::Tool::Variants]) } def self.variants end diff --git a/rbi/openai/models/responses/tool_choice_types.rbi b/rbi/openai/models/responses/tool_choice_types.rbi index 3851436c..f66d4aac 100644 --- a/rbi/openai/models/responses/tool_choice_types.rbi +++ b/rbi/openai/models/responses/tool_choice_types.rbi @@ -17,6 +17,9 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` sig { returns(OpenAI::Responses::ToolChoiceTypes::Type::OrSymbol) } attr_accessor :type @@ -36,6 +39,9 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` type: ) end @@ -56,6 +62,9 @@ module OpenAI # - `file_search` # - `web_search_preview` # - `computer_use_preview` + # - `code_interpreter` + # - `mcp` + # - `image_generation` module Type extend OpenAI::Internal::Type::Enum @@ -85,6 +94,18 @@ module OpenAI :web_search_preview_2025_03_11, OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol ) + IMAGE_GENERATION = + T.let( + :image_generation, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) + CODE_INTERPRETER = + T.let( + :code_interpreter, + OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol + ) + MCP = + T.let(:mcp, OpenAI::Responses::ToolChoiceTypes::Type::TaggedSymbol) sig do override.returns( diff --git a/rbi/openai/models/uploads/part_create_params.rbi b/rbi/openai/models/uploads/part_create_params.rbi index 1b069db5..031b224d 100644 --- a/rbi/openai/models/uploads/part_create_params.rbi +++ b/rbi/openai/models/uploads/part_create_params.rbi @@ -13,12 +13,12 @@ module OpenAI end # The chunk of bytes for this Part. 
- sig { returns(T.any(Pathname, StringIO, IO, OpenAI::FilePart)) } + sig { returns(OpenAI::Internal::FileInput) } attr_accessor :data sig do params( - data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + data: OpenAI::Internal::FileInput, request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -32,7 +32,7 @@ module OpenAI sig do override.returns( { - data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + data: OpenAI::Internal::FileInput, request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/models/vector_store.rbi b/rbi/openai/models/vector_store.rbi index c3c6dc2d..1e5ec9ca 100644 --- a/rbi/openai/models/vector_store.rbi +++ b/rbi/openai/models/vector_store.rbi @@ -52,11 +52,11 @@ module OpenAI attr_accessor :usage_bytes # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } + sig { returns(T.nilable(OpenAI::VectorStore::ExpiresAfter)) } attr_reader :expires_after sig do - params(expires_after: OpenAI::VectorStoreExpirationAfter::OrHash).void + params(expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash).void end attr_writer :expires_after @@ -76,7 +76,7 @@ module OpenAI name: String, status: OpenAI::VectorStore::Status::OrSymbol, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, + expires_after: OpenAI::VectorStore::ExpiresAfter::OrHash, expires_at: T.nilable(Integer), object: Symbol ).returns(T.attached_class) @@ -125,7 +125,7 @@ module OpenAI object: Symbol, status: OpenAI::VectorStore::Status::TaggedSymbol, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStore::ExpiresAfter, expires_at: T.nilable(Integer) } ) @@ -218,6 +218,37 @@ module OpenAI def self.values end end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any(OpenAI::VectorStore::ExpiresAfter, OpenAI::Internal::AnyHash) + end + + # Anchor timestamp after which the 
expiration policy applies. Supported anchors: + # `last_active_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of days after the anchor time that the vector store will expire. + sig { returns(Integer) } + attr_accessor :days + + # The expiration policy for a vector store. + sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + anchor: :last_active_at + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/vector_store_create_params.rbi b/rbi/openai/models/vector_store_create_params.rbi index fb7b5912..674fc93d 100644 --- a/rbi/openai/models/vector_store_create_params.rbi +++ b/rbi/openai/models/vector_store_create_params.rbi @@ -37,11 +37,13 @@ module OpenAI attr_writer :chunking_strategy # The expiration policy for a vector store. 
- sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreCreateParams::ExpiresAfter)) } attr_reader :expires_after sig do - params(expires_after: OpenAI::VectorStoreExpirationAfter::OrHash).void + params( + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash + ).void end attr_writer :expires_after @@ -77,7 +79,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam::OrHash, OpenAI::StaticFileChunkingStrategyObjectParam::OrHash ), - expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -115,7 +117,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam, OpenAI::StaticFileChunkingStrategyObjectParam ), - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -125,6 +127,40 @@ module OpenAI end def to_hash end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStoreCreateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of days after the anchor time that the vector store will expire. + sig { returns(Integer) } + attr_accessor :days + + # The expiration policy for a vector store. + sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. 
+ anchor: :last_active_at + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/vector_store_expiration_after.rbi b/rbi/openai/models/vector_store_expiration_after.rbi deleted file mode 100644 index 7b06060c..00000000 --- a/rbi/openai/models/vector_store_expiration_after.rbi +++ /dev/null @@ -1,36 +0,0 @@ -# typed: strong - -module OpenAI - module Models - class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel - OrHash = - T.type_alias do - T.any(OpenAI::VectorStoreExpirationAfter, OpenAI::Internal::AnyHash) - end - - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - sig { returns(Symbol) } - attr_accessor :anchor - - # The number of days after the anchor time that the vector store will expire. - sig { returns(Integer) } - attr_accessor :days - - # The expiration policy for a vector store. - sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } - def self.new( - # The number of days after the anchor time that the vector store will expire. - days:, - # Anchor timestamp after which the expiration policy applies. Supported anchors: - # `last_active_at`. - anchor: :last_active_at - ) - end - - sig { override.returns({ anchor: Symbol, days: Integer }) } - def to_hash - end - end - end -end diff --git a/rbi/openai/models/vector_store_search_params.rbi b/rbi/openai/models/vector_store_search_params.rbi index 20dd60cc..3d78d7c9 100644 --- a/rbi/openai/models/vector_store_search_params.rbi +++ b/rbi/openai/models/vector_store_search_params.rbi @@ -12,7 +12,7 @@ module OpenAI end # A query string for a search - sig { returns(T.any(String, T::Array[String])) } + sig { returns(OpenAI::VectorStoreSearchParams::Query::Variants) } attr_accessor :query # A filter to apply based on file attributes. 
@@ -65,7 +65,7 @@ module OpenAI sig do params( - query: T.any(String, T::Array[String]), + query: OpenAI::VectorStoreSearchParams::Query::Variants, filters: T.any( OpenAI::ComparisonFilter::OrHash, @@ -97,7 +97,7 @@ module OpenAI sig do override.returns( { - query: T.any(String, T::Array[String]), + query: OpenAI::VectorStoreSearchParams::Query::Variants, filters: T.any(OpenAI::ComparisonFilter, OpenAI::CompoundFilter), max_num_results: Integer, ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, diff --git a/rbi/openai/models/vector_store_search_response.rbi b/rbi/openai/models/vector_store_search_response.rbi index 05bfc3de..eb825847 100644 --- a/rbi/openai/models/vector_store_search_response.rbi +++ b/rbi/openai/models/vector_store_search_response.rbi @@ -17,7 +17,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::Models::VectorStoreSearchResponse::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -42,7 +49,12 @@ module OpenAI sig do params( attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Models::VectorStoreSearchResponse::Attribute::Variants + ] + ), content: T::Array[ OpenAI::Models::VectorStoreSearchResponse::Content::OrHash @@ -74,7 +86,12 @@ module OpenAI override.returns( { attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::Models::VectorStoreSearchResponse::Attribute::Variants + ] + ), content: T::Array[OpenAI::Models::VectorStoreSearchResponse::Content], file_id: String, diff --git a/rbi/openai/models/vector_store_update_params.rbi b/rbi/openai/models/vector_store_update_params.rbi index 8e2409f5..1d755b92 100644 --- a/rbi/openai/models/vector_store_update_params.rbi +++ 
b/rbi/openai/models/vector_store_update_params.rbi @@ -12,12 +12,13 @@ module OpenAI end # The expiration policy for a vector store. - sig { returns(T.nilable(OpenAI::VectorStoreExpirationAfter)) } + sig { returns(T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter)) } attr_reader :expires_after sig do params( - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash) + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash) ).void end attr_writer :expires_after @@ -37,7 +38,8 @@ module OpenAI sig do params( - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions::OrHash @@ -62,7 +64,8 @@ module OpenAI sig do override.returns( { - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions @@ -71,6 +74,40 @@ module OpenAI end def to_hash end + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + OrHash = + T.type_alias do + T.any( + OpenAI::VectorStoreUpdateParams::ExpiresAfter, + OpenAI::Internal::AnyHash + ) + end + + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. + sig { returns(Symbol) } + attr_accessor :anchor + + # The number of days after the anchor time that the vector store will expire. + sig { returns(Integer) } + attr_accessor :days + + # The expiration policy for a vector store. + sig { params(days: Integer, anchor: Symbol).returns(T.attached_class) } + def self.new( + # The number of days after the anchor time that the vector store will expire. + days:, + # Anchor timestamp after which the expiration policy applies. Supported anchors: + # `last_active_at`. 
+ anchor: :last_active_at + ) + end + + sig { override.returns({ anchor: Symbol, days: Integer }) } + def to_hash + end + end end end end diff --git a/rbi/openai/models/vector_stores/file_batch_create_params.rbi b/rbi/openai/models/vector_stores/file_batch_create_params.rbi index 3ac858b8..c4e42f6b 100644 --- a/rbi/openai/models/vector_stores/file_batch_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_batch_create_params.rbi @@ -27,7 +27,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -60,7 +67,12 @@ module OpenAI params( file_ids: T::Array[String], attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, @@ -92,7 +104,12 @@ module OpenAI { file_ids: T::Array[String], attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam, diff --git a/rbi/openai/models/vector_stores/file_create_params.rbi b/rbi/openai/models/vector_stores/file_create_params.rbi index b98a191b..a335e71c 100644 --- a/rbi/openai/models/vector_stores/file_create_params.rbi +++ b/rbi/openai/models/vector_stores/file_create_params.rbi @@ -27,7 +27,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. 
sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -60,7 +67,12 @@ module OpenAI params( file_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, @@ -92,7 +104,12 @@ module OpenAI { file_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam, diff --git a/rbi/openai/models/vector_stores/file_update_params.rbi b/rbi/openai/models/vector_stores/file_update_params.rbi index c8d7d898..da5190dc 100644 --- a/rbi/openai/models/vector_stores/file_update_params.rbi +++ b/rbi/openai/models/vector_stores/file_update_params.rbi @@ -24,7 +24,14 @@ module OpenAI # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. 
sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) + returns( + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ) + ) end attr_accessor :attributes @@ -32,7 +39,12 @@ module OpenAI params( vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ), request_options: OpenAI::RequestOptions::OrHash ).returns(T.attached_class) end @@ -53,7 +65,12 @@ module OpenAI { vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ), request_options: OpenAI::RequestOptions } ) diff --git a/rbi/openai/models/vector_stores/vector_store_file.rbi b/rbi/openai/models/vector_stores/vector_store_file.rbi index 7cf4a149..9207da17 100644 --- a/rbi/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/openai/models/vector_stores/vector_store_file.rbi @@ -68,22 +68,20 @@ module OpenAI # querying for objects via API or the dashboard. Keys are strings with a maximum # length of 64 characters. Values are strings with a maximum length of 512 # characters, booleans, or numbers. - sig do - returns(T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)])) - end - attr_accessor :attributes - - # The strategy used to chunk the file. sig do returns( T.nilable( - T.any( - OpenAI::StaticFileChunkingStrategyObject, - OpenAI::OtherFileChunkingStrategyObject - ) + T::Hash[ + Symbol, + OpenAI::VectorStores::VectorStoreFile::Attribute::Variants + ] ) ) end + attr_accessor :attributes + + # The strategy used to chunk the file. 
+ sig { returns(T.nilable(OpenAI::FileChunkingStrategy::Variants)) } attr_reader :chunking_strategy sig do @@ -110,7 +108,12 @@ module OpenAI usage_bytes: Integer, vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::VectorStoreFile::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::StaticFileChunkingStrategyObject::OrHash, @@ -165,12 +168,13 @@ module OpenAI usage_bytes: Integer, vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), - chunking_strategy: - T.any( - OpenAI::StaticFileChunkingStrategyObject, - OpenAI::OtherFileChunkingStrategyObject - ) + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::VectorStoreFile::Attribute::Variants + ] + ), + chunking_strategy: OpenAI::FileChunkingStrategy::Variants } ) end diff --git a/rbi/openai/resources/audio/transcriptions.rbi b/rbi/openai/resources/audio/transcriptions.rbi index 02148f4b..187218b0 100644 --- a/rbi/openai/resources/audio/transcriptions.rbi +++ b/rbi/openai/resources/audio/transcriptions.rbi @@ -10,7 +10,7 @@ module OpenAI # Transcribes audio into the input language. sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( @@ -31,10 +31,7 @@ module OpenAI stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( - T.any( - OpenAI::Audio::Transcription, - OpenAI::Audio::TranscriptionVerbose - ) + OpenAI::Models::Audio::TranscriptionCreateResponse::Variants ) end def create( @@ -94,7 +91,7 @@ module OpenAI # Transcribes audio into the input language. 
sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, model: T.any(String, OpenAI::AudioModel::OrSymbol), chunking_strategy: T.nilable( @@ -116,10 +113,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Audio::TranscriptionTextDeltaEvent, - OpenAI::Audio::TranscriptionTextDoneEvent - ) + OpenAI::Audio::TranscriptionStreamEvent::Variants ] ) end diff --git a/rbi/openai/resources/audio/translations.rbi b/rbi/openai/resources/audio/translations.rbi index b2427856..bd8adba2 100644 --- a/rbi/openai/resources/audio/translations.rbi +++ b/rbi/openai/resources/audio/translations.rbi @@ -7,16 +7,14 @@ module OpenAI # Translates audio into English. sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, model: T.any(String, OpenAI::AudioModel::OrSymbol), prompt: String, response_format: OpenAI::Audio::TranslationCreateParams::ResponseFormat::OrSymbol, temperature: Float, request_options: OpenAI::RequestOptions::OrHash - ).returns( - T.any(OpenAI::Audio::Translation, OpenAI::Audio::TranslationVerbose) - ) + ).returns(OpenAI::Models::Audio::TranslationCreateResponse::Variants) end def create( # The audio file object (not file name) translate, in one of these formats: flac, diff --git a/rbi/openai/resources/beta/threads.rbi b/rbi/openai/resources/beta/threads.rbi index d3b01a5d..738bcf29 100644 --- a/rbi/openai/resources/beta/threads.rbi +++ b/rbi/openai/resources/beta/threads.rbi @@ -148,7 +148,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Beta::Threads::Run) @@ -296,37 +298,14 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - 
T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) + OpenAI::Beta::AssistantStreamEvent::Variants ] ) end diff --git a/rbi/openai/resources/beta/threads/messages.rbi b/rbi/openai/resources/beta/threads/messages.rbi index 593ceae0..1562cf34 100644 --- a/rbi/openai/resources/beta/threads/messages.rbi +++ b/rbi/openai/resources/beta/threads/messages.rbi @@ -10,16 +10,7 @@ module 
OpenAI params( thread_id: String, content: - T.any( - String, - T::Array[ - T.any( - OpenAI::Beta::Threads::ImageFileContentBlock::OrHash, - OpenAI::Beta::Threads::ImageURLContentBlock::OrHash, - OpenAI::Beta::Threads::TextContentBlockParam::OrHash - ) - ] - ), + OpenAI::Beta::Threads::MessageCreateParams::Content::Variants, role: OpenAI::Beta::Threads::MessageCreateParams::Role::OrSymbol, attachments: T.nilable( diff --git a/rbi/openai/resources/beta/threads/runs.rbi b/rbi/openai/resources/beta/threads/runs.rbi index 755fcfbc..ba97e4db 100644 --- a/rbi/openai/resources/beta/threads/runs.rbi +++ b/rbi/openai/resources/beta/threads/runs.rbi @@ -61,7 +61,9 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Beta::Threads::Run) @@ -235,37 +237,14 @@ module OpenAI ), top_p: T.nilable(Float), truncation_strategy: - T.nilable(OpenAI::Beta::TruncationObject::OrHash), + T.nilable( + OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy::OrHash + ), stream: T.noreturn, request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - 
OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) + OpenAI::Beta::AssistantStreamEvent::Variants ] ) end @@ -535,32 +514,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Beta::AssistantStreamEvent::ThreadCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunQueued, - OpenAI::Beta::AssistantStreamEvent::ThreadRunInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunRequiresAction, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunIncomplete, - OpenAI::Beta::AssistantStreamEvent::ThreadRunFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelling, - OpenAI::Beta::AssistantStreamEvent::ThreadRunCancelled, - OpenAI::Beta::AssistantStreamEvent::ThreadRunExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepFailed, - OpenAI::Beta::AssistantStreamEvent::ThreadRunStepCancelled, - 
OpenAI::Beta::AssistantStreamEvent::ThreadRunStepExpired, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCreated, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageInProgress, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageDelta, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageCompleted, - OpenAI::Beta::AssistantStreamEvent::ThreadMessageIncomplete, - OpenAI::Beta::AssistantStreamEvent::ErrorEvent - ) + OpenAI::Beta::AssistantStreamEvent::Variants ] ) end diff --git a/rbi/openai/resources/chat/completions.rbi b/rbi/openai/resources/chat/completions.rbi index 46d3f6ba..a54d1c53 100644 --- a/rbi/openai/resources/chat/completions.rbi +++ b/rbi/openai/resources/chat/completions.rbi @@ -70,6 +70,7 @@ module OpenAI T.any( OpenAI::ResponseFormatText::OrHash, OpenAI::ResponseFormatJSONSchema::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter, OpenAI::ResponseFormatJSONObject::OrHash ), seed: T.nilable(Integer), @@ -77,7 +78,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), @@ -87,7 +89,13 @@ module OpenAI OpenAI::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Chat::ChatCompletionNamedToolChoice::OrHash ), - tools: T::Array[OpenAI::Chat::ChatCompletionTool::OrHash], + tools: + T::Array[ + T.any( + OpenAI::Chat::ChatCompletionTool::OrHash, + OpenAI::StructuredOutput::JsonSchemaConverter + ) + ], top_logprobs: T.nilable(Integer), top_p: T.nilable(Float), user: String, @@ -278,8 +286,8 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. 
Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. Learn more @@ -363,7 +371,8 @@ module OpenAI T.nilable( OpenAI::Chat::CompletionCreateParams::ServiceTier::OrSymbol ), - stop: T.nilable(T.any(String, T::Array[String])), + stop: + T.nilable(OpenAI::Chat::CompletionCreateParams::Stop::Variants), store: T.nilable(T::Boolean), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), @@ -564,8 +573,8 @@ module OpenAI # # We generally recommend altering this or `temperature` but not both. top_p: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # This tool searches the web for relevant results to use in a response. 
Learn more diff --git a/rbi/openai/resources/completions.rbi b/rbi/openai/resources/completions.rbi index 685399d1..507da5ba 100644 --- a/rbi/openai/resources/completions.rbi +++ b/rbi/openai/resources/completions.rbi @@ -9,15 +9,7 @@ module OpenAI sig do params( model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -27,7 +19,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), @@ -157,15 +149,7 @@ module OpenAI sig do params( model: T.any(String, OpenAI::CompletionCreateParams::Model::OrSymbol), - prompt: - T.nilable( - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ) - ), + prompt: T.nilable(OpenAI::CompletionCreateParams::Prompt::Variants), best_of: T.nilable(Integer), echo: T.nilable(T::Boolean), frequency_penalty: T.nilable(Float), @@ -175,7 +159,7 @@ module OpenAI n: T.nilable(Integer), presence_penalty: T.nilable(Float), seed: T.nilable(Integer), - stop: T.nilable(T.any(String, T::Array[String])), + stop: T.nilable(OpenAI::CompletionCreateParams::Stop::Variants), stream_options: T.nilable(OpenAI::Chat::ChatCompletionStreamOptions::OrHash), suffix: T.nilable(String), diff --git a/rbi/openai/resources/containers.rbi b/rbi/openai/resources/containers.rbi new file mode 100644 index 00000000..fc23a5f6 --- /dev/null +++ b/rbi/openai/resources/containers.rbi @@ -0,0 +1,86 @@ +# typed: strong + +module OpenAI + module Resources + class Containers + sig 
{ returns(OpenAI::Resources::Containers::Files) } + attr_reader :files + + # Create Container + sig do + params( + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter::OrHash, + file_ids: T::Array[String], + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::ContainerCreateResponse) + end + def create( + # Name of the container to create. + name:, + # Container expiration time in seconds relative to the 'anchor' time. + expires_after: nil, + # IDs of files to copy to the container. + file_ids: nil, + request_options: {} + ) + end + + # Retrieve Container + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::ContainerRetrieveResponse) + end + def retrieve(container_id, request_options: {}) + end + + # List Containers + sig do + params( + after: String, + limit: Integer, + order: OpenAI::ContainerListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[OpenAI::Models::ContainerListResponse] + ) + end + def list( + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) + end + + # Delete Container + sig do + params( + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def delete( + # The ID of the container to delete. 
+ container_id, + request_options: {} + ) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end +end diff --git a/rbi/openai/resources/containers/files.rbi b/rbi/openai/resources/containers/files.rbi new file mode 100644 index 00000000..3f49be09 --- /dev/null +++ b/rbi/openai/resources/containers/files.rbi @@ -0,0 +1,92 @@ +# typed: strong + +module OpenAI + module Resources + class Containers + class Files + sig { returns(OpenAI::Resources::Containers::Files::Content) } + attr_reader :content + + # Create a Container File + # + # You can send either a multipart/form-data request with the raw file content, or + # a JSON request with a file ID. + sig do + params( + container_id: String, + file: OpenAI::Internal::FileInput, + file_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Containers::FileCreateResponse) + end + def create( + container_id, + # The File object (not file name) to be uploaded. + file: nil, + # Name of the file to create. + file_id: nil, + request_options: {} + ) + end + + # Retrieve Container File + sig do + params( + file_id: String, + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).returns(OpenAI::Models::Containers::FileRetrieveResponse) + end + def retrieve(file_id, container_id:, request_options: {}) + end + + # List Container files + sig do + params( + container_id: String, + after: String, + limit: Integer, + order: OpenAI::Containers::FileListParams::Order::OrSymbol, + request_options: OpenAI::RequestOptions::OrHash + ).returns( + OpenAI::Internal::CursorPage[ + OpenAI::Models::Containers::FileListResponse + ] + ) + end + def list( + container_id, + # A cursor for use in pagination. `after` is an object ID that defines your place + # in the list. 
For instance, if you make a list request and receive 100 objects, + # ending with obj_foo, your subsequent call can include after=obj_foo in order to + # fetch the next page of the list. + after: nil, + # A limit on the number of objects to be returned. Limit can range between 1 and + # 100, and the default is 20. + limit: nil, + # Sort order by the `created_at` timestamp of the objects. `asc` for ascending + # order and `desc` for descending order. + order: nil, + request_options: {} + ) + end + + # Delete Container File + sig do + params( + file_id: String, + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def delete(file_id, container_id:, request_options: {}) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end +end diff --git a/rbi/openai/resources/containers/files/content.rbi b/rbi/openai/resources/containers/files/content.rbi new file mode 100644 index 00000000..a299fe68 --- /dev/null +++ b/rbi/openai/resources/containers/files/content.rbi @@ -0,0 +1,27 @@ +# typed: strong + +module OpenAI + module Resources + class Containers + class Files + class Content + # Retrieve Container File Content + sig do + params( + file_id: String, + container_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def retrieve(file_id, container_id:, request_options: {}) + end + + # @api private + sig { params(client: OpenAI::Client).returns(T.attached_class) } + def self.new(client:) + end + end + end + end + end +end diff --git a/rbi/openai/resources/embeddings.rbi b/rbi/openai/resources/embeddings.rbi index 061e114c..bfdafb67 100644 --- a/rbi/openai/resources/embeddings.rbi +++ b/rbi/openai/resources/embeddings.rbi @@ -6,13 +6,7 @@ module OpenAI # Creates an embedding vector representing the input text. 
sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[Integer], - T::Array[T::Array[Integer]] - ), + input: OpenAI::EmbeddingCreateParams::Input::Variants, model: T.any(String, OpenAI::EmbeddingModel::OrSymbol), dimensions: Integer, encoding_format: diff --git a/rbi/openai/resources/evals.rbi b/rbi/openai/resources/evals.rbi index c7b5ca65..9ec9f490 100644 --- a/rbi/openai/resources/evals.rbi +++ b/rbi/openai/resources/evals.rbi @@ -7,7 +7,8 @@ module OpenAI attr_reader :runs # Create the structure of an evaluation that can be used to test a model's - # performance. An evaluation is a set of testing criteria and a datasource. After + # performance. An evaluation is a set of testing criteria and the config for a + # data source, which dictates the schema of the data used in the evaluation. After # creating an evaluation, you can run it on different models and model parameters. # We support several types of graders and datasources. For more information, see # the [Evals guide](https://platform.openai.com/docs/guides/evals). @@ -35,9 +36,13 @@ module OpenAI ).returns(OpenAI::Models::EvalCreateResponse) end def create( - # The configuration for the data source used for the evaluation runs. + # The configuration for the data source used for the evaluation runs. Dictates the + # schema of the data used in the evaluation. data_source_config:, - # A list of graders for all eval runs in this group. + # A list of graders for all eval runs in this group. Graders can reference + # variables in the data source using double curly braces notation, like + # `{{item.variable_name}}`. To reference the model's output, use the `sample` + # namespace (ie, `{{sample.output_text}}`). testing_criteria:, # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and diff --git a/rbi/openai/resources/evals/runs.rbi b/rbi/openai/resources/evals/runs.rbi index 86e351a0..20df812d 100644 --- a/rbi/openai/resources/evals/runs.rbi +++ b/rbi/openai/resources/evals/runs.rbi @@ -7,7 +7,9 @@ module OpenAI sig { returns(OpenAI::Resources::Evals::Runs::OutputItems) } attr_reader :output_items - # Create a new evaluation run. This is the endpoint that will kick off grading. + # Kicks off a new run for a given evaluation, specifying the data source, and what + # model configuration to use to test. The datasource will be validated against the + # schema specified in the config of the evaluation. sig do params( eval_id: String, @@ -15,7 +17,7 @@ module OpenAI T.any( OpenAI::Evals::CreateEvalJSONLRunDataSource::OrHash, OpenAI::Evals::CreateEvalCompletionsRunDataSource::OrHash, - OpenAI::Evals::CreateEvalResponsesRunDataSource::OrHash + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::OrHash ), metadata: T.nilable(T::Hash[Symbol, String]), name: String, diff --git a/rbi/openai/resources/files.rbi b/rbi/openai/resources/files.rbi index 022613a9..9ac9665f 100644 --- a/rbi/openai/resources/files.rbi +++ b/rbi/openai/resources/files.rbi @@ -26,7 +26,7 @@ module OpenAI # storage limits. 
sig do params( - file: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + file: OpenAI::Internal::FileInput, purpose: OpenAI::FilePurpose::OrSymbol, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::FileObject) diff --git a/rbi/openai/resources/fine_tuning/alpha/graders.rbi b/rbi/openai/resources/fine_tuning/alpha/graders.rbi index 4e22b461..8b1683c1 100644 --- a/rbi/openai/resources/fine_tuning/alpha/graders.rbi +++ b/rbi/openai/resources/fine_tuning/alpha/graders.rbi @@ -18,7 +18,7 @@ module OpenAI ), model_sample: String, reference_answer: - T.any(String, T.anything, T::Array[T.anything], Float), + OpenAI::FineTuning::Alpha::GraderRunParams::ReferenceAnswer::Variants, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Models::FineTuning::Alpha::GraderRunResponse) end diff --git a/rbi/openai/resources/images.rbi b/rbi/openai/resources/images.rbi index bc10faa9..e3a93ec3 100644 --- a/rbi/openai/resources/images.rbi +++ b/rbi/openai/resources/images.rbi @@ -6,7 +6,7 @@ module OpenAI # Creates a variation of a given image. This endpoint only supports `dall-e-2`. sig do params( - image: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + image: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), response_format: @@ -46,17 +46,10 @@ module OpenAI # prompt. This endpoint only supports `gpt-image-1` and `dall-e-2`. 
sig do params( - image: - T.any( - Pathname, - StringIO, - IO, - OpenAI::FilePart, - T::Array[T.any(Pathname, StringIO, IO, OpenAI::FilePart)] - ), + image: OpenAI::ImageEditParams::Image::Variants, prompt: String, background: T.nilable(OpenAI::ImageEditParams::Background::OrSymbol), - mask: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + mask: OpenAI::Internal::FileInput, model: T.nilable(T.any(String, OpenAI::ImageModel::OrSymbol)), n: T.nilable(Integer), quality: T.nilable(OpenAI::ImageEditParams::Quality::OrSymbol), diff --git a/rbi/openai/resources/moderations.rbi b/rbi/openai/resources/moderations.rbi index 06d15b68..38d862ca 100644 --- a/rbi/openai/resources/moderations.rbi +++ b/rbi/openai/resources/moderations.rbi @@ -7,17 +7,7 @@ module OpenAI # the [moderation guide](https://platform.openai.com/docs/guides/moderation). sig do params( - input: - T.any( - String, - T::Array[String], - T::Array[ - T.any( - OpenAI::ModerationImageURLInput::OrHash, - OpenAI::ModerationTextInput::OrHash - ) - ] - ), + input: OpenAI::ModerationCreateParams::Input::Variants, model: T.any(String, OpenAI::ModerationModel::OrSymbol), request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Models::ModerationCreateResponse) diff --git a/rbi/openai/resources/responses.rbi b/rbi/openai/resources/responses.rbi index 2c880f53..3753b593 100644 --- a/rbi/openai/resources/responses.rbi +++ b/rbi/openai/resources/responses.rbi @@ -21,31 +21,14 @@ module OpenAI # your own data as input for the model's response. 
sig do params( - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Responses::ResponseInputItem::Message::OrHash, - OpenAI::Responses::ResponseOutputMessage::OrHash, - OpenAI::Responses::ResponseFileSearchToolCall::OrHash, - OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, - OpenAI::Responses::ResponseFunctionWebSearch::OrHash, - OpenAI::Responses::ResponseFunctionToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash, - OpenAI::Responses::ResponseInputItem::ItemReference::OrHash - ) - ] - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, model: T.any( String, OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -72,9 +55,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -105,6 +92,9 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. model:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + background: nil, # Specify additional output data to include in the model response. 
Currently # supported values are: # @@ -212,8 +202,8 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` @@ -238,31 +228,14 @@ module OpenAI # your own data as input for the model's response. sig do params( - input: - T.any( - String, - T::Array[ - T.any( - OpenAI::Responses::EasyInputMessage::OrHash, - OpenAI::Responses::ResponseInputItem::Message::OrHash, - OpenAI::Responses::ResponseOutputMessage::OrHash, - OpenAI::Responses::ResponseFileSearchToolCall::OrHash, - OpenAI::Responses::ResponseComputerToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::ComputerCallOutput::OrHash, - OpenAI::Responses::ResponseFunctionWebSearch::OrHash, - OpenAI::Responses::ResponseFunctionToolCall::OrHash, - OpenAI::Responses::ResponseInputItem::FunctionCallOutput::OrHash, - OpenAI::Responses::ResponseReasoningItem::OrHash, - OpenAI::Responses::ResponseInputItem::ItemReference::OrHash - ) - ] - ), + input: OpenAI::Responses::ResponseCreateParams::Input::Variants, model: T.any( String, OpenAI::ChatModel::OrSymbol, OpenAI::ResponsesModel::ResponsesOnlyModel::OrSymbol ), + background: T.nilable(T::Boolean), include: T.nilable( T::Array[OpenAI::Responses::ResponseIncludable::OrSymbol] @@ -289,9 +262,13 @@ module OpenAI tools: T::Array[ T.any( - OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::FunctionTool::OrHash, + OpenAI::Responses::FileSearchTool::OrHash, OpenAI::Responses::ComputerTool::OrHash, + 
OpenAI::Responses::Tool::Mcp::OrHash, + OpenAI::Responses::Tool::CodeInterpreter::OrHash, + OpenAI::Responses::Tool::ImageGeneration::OrHash, + OpenAI::Responses::Tool::LocalShell::OrHash, OpenAI::Responses::WebSearchTool::OrHash ) ], @@ -305,44 +282,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::Stream[ - T.any( - OpenAI::Responses::ResponseAudioDeltaEvent, - OpenAI::Responses::ResponseAudioDoneEvent, - OpenAI::Responses::ResponseAudioTranscriptDeltaEvent, - OpenAI::Responses::ResponseAudioTranscriptDoneEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCodeDoneEvent, - OpenAI::Responses::ResponseCodeInterpreterCallCompletedEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInProgressEvent, - OpenAI::Responses::ResponseCodeInterpreterCallInterpretingEvent, - OpenAI::Responses::ResponseCompletedEvent, - OpenAI::Responses::ResponseContentPartAddedEvent, - OpenAI::Responses::ResponseContentPartDoneEvent, - OpenAI::Responses::ResponseCreatedEvent, - OpenAI::Responses::ResponseErrorEvent, - OpenAI::Responses::ResponseFileSearchCallCompletedEvent, - OpenAI::Responses::ResponseFileSearchCallInProgressEvent, - OpenAI::Responses::ResponseFileSearchCallSearchingEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDeltaEvent, - OpenAI::Responses::ResponseFunctionCallArgumentsDoneEvent, - OpenAI::Responses::ResponseInProgressEvent, - OpenAI::Responses::ResponseFailedEvent, - OpenAI::Responses::ResponseIncompleteEvent, - OpenAI::Responses::ResponseOutputItemAddedEvent, - OpenAI::Responses::ResponseOutputItemDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent, - OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDeltaEvent, - OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent, - OpenAI::Responses::ResponseRefusalDeltaEvent, - OpenAI::Responses::ResponseRefusalDoneEvent, - 
OpenAI::Responses::ResponseTextAnnotationDeltaEvent, - OpenAI::Responses::ResponseTextDeltaEvent, - OpenAI::Responses::ResponseTextDoneEvent, - OpenAI::Responses::ResponseWebSearchCallCompletedEvent, - OpenAI::Responses::ResponseWebSearchCallInProgressEvent, - OpenAI::Responses::ResponseWebSearchCallSearchingEvent - ) + OpenAI::Responses::ResponseStreamEvent::Variants ] ) end @@ -363,6 +303,9 @@ module OpenAI # [model guide](https://platform.openai.com/docs/models) to browse and compare # available models. model:, + # Whether to run the model response in the background. + # [Learn more](https://platform.openai.com/docs/guides/background). + background: nil, # Specify additional output data to include in the model response. Currently # supported values are: # @@ -470,8 +413,8 @@ module OpenAI # - `disabled` (default): If a model response will exceed the context window size # for a model, the request will fail with a 400 error. truncation: nil, - # A unique identifier representing your end-user, which can help OpenAI to monitor - # and detect abuse. + # A stable identifier for your end-users. Used to boost cache hit rates by better + # bucketing similar requests and to help OpenAI detect and prevent abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). user: nil, # There is no need to provide `stream:`. Instead, use `#stream_raw` or `#create` @@ -513,6 +456,22 @@ module OpenAI ) end + # Cancels a model response with the given ID. Only responses created with the + # `background` parameter set to `true` can be cancelled. + # [Learn more](https://platform.openai.com/docs/guides/background). + sig do + params( + response_id: String, + request_options: OpenAI::RequestOptions::OrHash + ).void + end + def cancel( + # The ID of the response to cancel. 
+ response_id, + request_options: {} + ) + end + # @api private sig { params(client: OpenAI::Client).returns(T.attached_class) } def self.new(client:) diff --git a/rbi/openai/resources/responses/input_items.rbi b/rbi/openai/resources/responses/input_items.rbi index feef44a4..db8126f6 100644 --- a/rbi/openai/resources/responses/input_items.rbi +++ b/rbi/openai/resources/responses/input_items.rbi @@ -16,16 +16,7 @@ module OpenAI request_options: OpenAI::RequestOptions::OrHash ).returns( OpenAI::Internal::CursorPage[ - T.any( - OpenAI::Responses::ResponseInputMessageItem, - OpenAI::Responses::ResponseOutputMessage, - OpenAI::Responses::ResponseFileSearchToolCall, - OpenAI::Responses::ResponseComputerToolCall, - OpenAI::Responses::ResponseComputerToolCallOutputItem, - OpenAI::Responses::ResponseFunctionWebSearch, - OpenAI::Responses::ResponseFunctionToolCallItem, - OpenAI::Responses::ResponseFunctionToolCallOutputItem - ) + OpenAI::Responses::ResponseItem::Variants ] ) end @@ -42,7 +33,7 @@ module OpenAI # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. limit: nil, - # The order to return the input items in. Default is `asc`. + # The order to return the input items in. Default is `desc`. # # - `asc`: Return the input items in ascending order. # - `desc`: Return the input items in descending order. 
diff --git a/rbi/openai/resources/uploads/parts.rbi b/rbi/openai/resources/uploads/parts.rbi index ae17f0fc..055b46b9 100644 --- a/rbi/openai/resources/uploads/parts.rbi +++ b/rbi/openai/resources/uploads/parts.rbi @@ -18,7 +18,7 @@ module OpenAI sig do params( upload_id: String, - data: T.any(Pathname, StringIO, IO, OpenAI::FilePart), + data: OpenAI::Internal::FileInput, request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::Uploads::UploadPart) end diff --git a/rbi/openai/resources/vector_stores.rbi b/rbi/openai/resources/vector_stores.rbi index 802f6110..727abd63 100644 --- a/rbi/openai/resources/vector_stores.rbi +++ b/rbi/openai/resources/vector_stores.rbi @@ -17,7 +17,7 @@ module OpenAI OpenAI::AutoFileChunkingStrategyParam::OrHash, OpenAI::StaticFileChunkingStrategyObjectParam::OrHash ), - expires_after: OpenAI::VectorStoreExpirationAfter::OrHash, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter::OrHash, file_ids: T::Array[String], metadata: T.nilable(T::Hash[Symbol, String]), name: String, @@ -65,7 +65,8 @@ module OpenAI sig do params( vector_store_id: String, - expires_after: T.nilable(OpenAI::VectorStoreExpirationAfter::OrHash), + expires_after: + T.nilable(OpenAI::VectorStoreUpdateParams::ExpiresAfter::OrHash), metadata: T.nilable(T::Hash[Symbol, String]), name: T.nilable(String), request_options: OpenAI::RequestOptions::OrHash @@ -139,7 +140,7 @@ module OpenAI sig do params( vector_store_id: String, - query: T.any(String, T::Array[String]), + query: OpenAI::VectorStoreSearchParams::Query::Variants, filters: T.any( OpenAI::ComparisonFilter::OrHash, diff --git a/rbi/openai/resources/vector_stores/file_batches.rbi b/rbi/openai/resources/vector_stores/file_batches.rbi index 43c47915..c6aca892 100644 --- a/rbi/openai/resources/vector_stores/file_batches.rbi +++ b/rbi/openai/resources/vector_stores/file_batches.rbi @@ -10,7 +10,12 @@ module OpenAI vector_store_id: String, file_ids: T::Array[String], attributes: - 
T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileBatchCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, diff --git a/rbi/openai/resources/vector_stores/files.rbi b/rbi/openai/resources/vector_stores/files.rbi index 8d760b4a..711c88f0 100644 --- a/rbi/openai/resources/vector_stores/files.rbi +++ b/rbi/openai/resources/vector_stores/files.rbi @@ -12,7 +12,12 @@ module OpenAI vector_store_id: String, file_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileCreateParams::Attribute::Variants + ] + ), chunking_strategy: T.any( OpenAI::AutoFileChunkingStrategyParam::OrHash, @@ -64,7 +69,12 @@ module OpenAI file_id: String, vector_store_id: String, attributes: - T.nilable(T::Hash[Symbol, T.any(String, Float, T::Boolean)]), + T.nilable( + T::Hash[ + Symbol, + OpenAI::VectorStores::FileUpdateParams::Attribute::Variants + ] + ), request_options: OpenAI::RequestOptions::OrHash ).returns(OpenAI::VectorStores::VectorStoreFile) end diff --git a/rbi/openai/structured_output.rbi b/rbi/openai/structured_output.rbi new file mode 100644 index 00000000..2c5bf0b3 --- /dev/null +++ b/rbi/openai/structured_output.rbi @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +module OpenAI + StructuredOutput = OpenAI::Helpers::StructuredOutput + ArrayOf = OpenAI::Helpers::StructuredOutput::ArrayOf + BaseModel = OpenAI::Helpers::StructuredOutput::BaseModel + Boolean = OpenAI::Helpers::StructuredOutput::Boolean + EnumOf = OpenAI::Helpers::StructuredOutput::EnumOf + UnionOf = OpenAI::Helpers::StructuredOutput::UnionOf +end diff --git a/sig/openai/client.rbs b/sig/openai/client.rbs index 929bc894..2b085363 100644 --- a/sig/openai/client.rbs +++ b/sig/openai/client.rbs @@ -46,6 +46,8 @@ module OpenAI attr_reader evals: OpenAI::Resources::Evals + attr_reader 
containers: OpenAI::Resources::Containers + private def auth_headers: -> ::Hash[String, String] def initialize: ( diff --git a/sig/openai/internal.rbs b/sig/openai/internal.rbs index 8dc7f62a..105072ce 100644 --- a/sig/openai/internal.rbs +++ b/sig/openai/internal.rbs @@ -2,6 +2,8 @@ module OpenAI module Internal extend OpenAI::Internal::Util::SorbetRuntimeSupport + type file_input = Pathname | StringIO | IO | String | OpenAI::FilePart + OMIT: Object end end diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 80fcc2a2..000ed3f5 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class ArrayOf[Elem] include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport def self.[]: ( ::Hash[Symbol, top] @@ -27,6 +28,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Array[top] | top) + def to_sorbet_type: -> top + def item_type: -> Elem def nilable?: -> bool diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index 67646785..f9e57a2e 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -68,6 +68,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Hash[top, top] | top) + def self.to_sorbet_type: -> top + def self.recursively_to_h: ( OpenAI::Internal::Type::BaseModel model, convert: bool diff --git a/sig/openai/internal/type/boolean.rbs b/sig/openai/internal/type/boolean.rbs index 0a654ab4..04edea4b 100644 --- a/sig/openai/internal/type/boolean.rbs +++ b/sig/openai/internal/type/boolean.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class Boolean extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport def self.===: (top other) -> bool @@ -17,6 +18,8 @@ module OpenAI bool | top value, state: 
OpenAI::Internal::Type::Converter::dump_state ) -> (bool | top) + + def self.to_sorbet_type: -> top end end end diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 69f1c6bd..2b6f3fb2 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -23,6 +23,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (Symbol | top) + def to_sorbet_type: -> top + def inspect: (?depth: Integer) -> String end end diff --git a/sig/openai/internal/type/file_input.rbs b/sig/openai/internal/type/file_input.rbs index 862c2111..db81644c 100644 --- a/sig/openai/internal/type/file_input.rbs +++ b/sig/openai/internal/type/file_input.rbs @@ -17,6 +17,8 @@ module OpenAI Pathname | StringIO | IO | String | top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> (Pathname | StringIO | IO | String | top) + + def self.to_sorbet_type: -> top end end end diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs index 26f65397..1c9d1d58 100644 --- a/sig/openai/internal/type/hash_of.rbs +++ b/sig/openai/internal/type/hash_of.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class HashOf[Elem] include OpenAI::Internal::Type::Converter + include OpenAI::Internal::Util::SorbetRuntimeSupport def self.[]: ( ::Hash[Symbol, top] @@ -27,6 +28,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> (::Hash[Symbol, top] | top) + def to_sorbet_type: -> top + def item_type: -> Elem def nilable?: -> bool diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs index 57d122c3..7a01ff6e 100644 --- a/sig/openai/internal/type/union.rbs +++ b/sig/openai/internal/type/union.rbs @@ -43,6 +43,8 @@ module OpenAI state: OpenAI::Internal::Type::Converter::dump_state ) -> top + def to_sorbet_type: -> top + def inspect: (?depth: Integer) -> String end end diff --git a/sig/openai/internal/type/unknown.rbs b/sig/openai/internal/type/unknown.rbs index 
0f9142d2..249f91dc 100644 --- a/sig/openai/internal/type/unknown.rbs +++ b/sig/openai/internal/type/unknown.rbs @@ -3,6 +3,7 @@ module OpenAI module Type class Unknown extend OpenAI::Internal::Type::Converter + extend OpenAI::Internal::Util::SorbetRuntimeSupport def self.===: (top other) -> bool @@ -17,6 +18,8 @@ module OpenAI top value, state: OpenAI::Internal::Type::Converter::dump_state ) -> top + + def self.to_sorbet_type: -> top end end end diff --git a/sig/openai/internal/util.rbs b/sig/openai/internal/util.rbs index c8416c7c..ec425e9f 100644 --- a/sig/openai/internal/util.rbs +++ b/sig/openai/internal/util.rbs @@ -5,6 +5,10 @@ module OpenAI def self?.monotonic_secs: -> Float + def self?.walk_namespaces: ( + Module | Class ns + ) -> Enumerable[(Module | Class)] + def self?.arch: -> String def self?.os: -> String @@ -166,7 +170,15 @@ module OpenAI def const_missing: (Symbol name) -> void + def sorbet_constant_defined?: (Symbol name) -> bool + def define_sorbet_constant!: (Symbol name) { -> top } -> void + + def to_sorbet_type: -> top + + def self.to_sorbet_type: ( + OpenAI::Internal::Util::SorbetRuntimeSupport | top `type` + ) -> top end end end diff --git a/sig/openai/models.rbs b/sig/openai/models.rbs index e151276a..3d40bce0 100644 --- a/sig/openai/models.rbs +++ b/sig/openai/models.rbs @@ -41,6 +41,16 @@ module OpenAI class CompoundFilter = OpenAI::Models::CompoundFilter + class ContainerCreateParams = OpenAI::Models::ContainerCreateParams + + class ContainerDeleteParams = OpenAI::Models::ContainerDeleteParams + + class ContainerListParams = OpenAI::Models::ContainerListParams + + class ContainerRetrieveParams = OpenAI::Models::ContainerRetrieveParams + + module Containers = OpenAI::Models::Containers + class CreateEmbeddingResponse = OpenAI::Models::CreateEmbeddingResponse class Embedding = OpenAI::Models::Embedding @@ -57,12 +67,8 @@ module OpenAI class EvalDeleteParams = OpenAI::Models::EvalDeleteParams - class EvalItem = OpenAI::Models::EvalItem - 
class EvalListParams = OpenAI::Models::EvalListParams - class EvalLogsDataSourceConfig = OpenAI::Models::EvalLogsDataSourceConfig - class EvalRetrieveParams = OpenAI::Models::EvalRetrieveParams module Evals = OpenAI::Models::Evals @@ -177,8 +183,6 @@ module OpenAI class VectorStoreDeleteParams = OpenAI::Models::VectorStoreDeleteParams - class VectorStoreExpirationAfter = OpenAI::Models::VectorStoreExpirationAfter - class VectorStoreListParams = OpenAI::Models::VectorStoreListParams class VectorStoreRetrieveParams = OpenAI::Models::VectorStoreRetrieveParams diff --git a/sig/openai/models/all_models.rbs b/sig/openai/models/all_models.rbs index 90320393..4d649383 100644 --- a/sig/openai/models/all_models.rbs +++ b/sig/openai/models/all_models.rbs @@ -3,7 +3,7 @@ module OpenAI type all_models = String | OpenAI::Models::chat_model - | OpenAI::AllModels::responses_only_model + | OpenAI::Models::AllModels::responses_only_model module AllModels extend OpenAI::Internal::Type::Union @@ -22,7 +22,7 @@ module OpenAI COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.values: -> ::Array[OpenAI::AllModels::responses_only_model] + def self?.values: -> ::Array[OpenAI::Models::AllModels::responses_only_model] end def self?.variants: -> ::Array[OpenAI::Models::all_models] diff --git a/sig/openai/models/audio/speech_create_params.rbs b/sig/openai/models/audio/speech_create_params.rbs index 2faac5ef..6c188596 100644 --- a/sig/openai/models/audio/speech_create_params.rbs +++ b/sig/openai/models/audio/speech_create_params.rbs @@ -46,6 +46,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + input: String, + model: OpenAI::Models::Audio::SpeechCreateParams::model, + voice: OpenAI::Models::Audio::SpeechCreateParams::voice, + instructions: String, + response_format: OpenAI::Models::Audio::SpeechCreateParams::response_format, + speed: Float, + request_options: 
OpenAI::RequestOptions + } + type model = String | OpenAI::Models::Audio::speech_model module Model diff --git a/sig/openai/models/audio/transcription.rbs b/sig/openai/models/audio/transcription.rbs index 3e522926..2e1beee5 100644 --- a/sig/openai/models/audio/transcription.rbs +++ b/sig/openai/models/audio/transcription.rbs @@ -21,6 +21,11 @@ module OpenAI ?logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] ) -> void + def to_hash: -> { + text: String, + logprobs: ::Array[OpenAI::Audio::Transcription::Logprob] + } + type logprob = { token: String, bytes: ::Array[Float], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -41,6 +46,12 @@ module OpenAI ?bytes: ::Array[Float], ?logprob: Float ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Float], + logprob: Float + } end end end diff --git a/sig/openai/models/audio/transcription_create_params.rbs b/sig/openai/models/audio/transcription_create_params.rbs index 18d8e5aa..28e08060 100644 --- a/sig/openai/models/audio/transcription_create_params.rbs +++ b/sig/openai/models/audio/transcription_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type transcription_create_params = { - file: (Pathname | StringIO | IO | OpenAI::FilePart), + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, include: ::Array[OpenAI::Models::Audio::transcription_include], @@ -11,7 +11,7 @@ module OpenAI prompt: String, response_format: OpenAI::Models::audio_response_format, temperature: Float, - timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] + timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] } & OpenAI::Internal::Type::request_parameters @@ -19,7 +19,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include 
OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor file: OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::Audio::TranscriptionCreateParams::model @@ -49,14 +49,14 @@ module OpenAI def temperature=: (Float) -> Float - attr_reader timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity]? + attr_reader timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity]? def timestamp_granularities=: ( - ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] - ) -> ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] + ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] + ) -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] def initialize: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], @@ -64,10 +64,23 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file: OpenAI::Internal::file_input, + model: OpenAI::Models::Audio::TranscriptionCreateParams::model, + chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, + include: ::Array[OpenAI::Models::Audio::transcription_include], + language: String, + prompt: String, + response_format: OpenAI::Models::audio_response_format, + 
temperature: Float, + timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::audio_model module Model @@ -85,14 +98,14 @@ module OpenAI type vad_config = { - type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + type: OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, prefix_padding_ms: Integer, silence_duration_ms: Integer, threshold: Float } class VadConfig < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_ + attr_accessor type: OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_ attr_reader prefix_padding_ms: Integer? @@ -107,12 +120,19 @@ module OpenAI def threshold=: (Float) -> Float def initialize: ( - type: OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + type: OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, ?prefix_padding_ms: Integer, ?silence_duration_ms: Integer, ?threshold: Float ) -> void + def to_hash: -> { + type: OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_, + prefix_padding_ms: Integer, + silence_duration_ms: Integer, + threshold: Float + } + type type_ = :server_vad module Type @@ -120,7 +140,7 @@ module OpenAI SERVER_VAD: :server_vad - def self?.values: -> ::Array[OpenAI::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_] + def self?.values: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::ChunkingStrategy::VadConfig::type_] end end @@ -135,7 +155,7 @@ module OpenAI WORD: :word SEGMENT: :segment - def self?.values: -> ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity] + def self?.values: -> ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity] 
end end end diff --git a/sig/openai/models/audio/transcription_segment.rbs b/sig/openai/models/audio/transcription_segment.rbs index 9c38a1be..82da7095 100644 --- a/sig/openai/models/audio/transcription_segment.rbs +++ b/sig/openai/models/audio/transcription_segment.rbs @@ -48,6 +48,19 @@ module OpenAI text: String, tokens: ::Array[Integer] ) -> void + + def to_hash: -> { + id: Integer, + avg_logprob: Float, + compression_ratio: Float, + end_: Float, + no_speech_prob: Float, + seek: Integer, + start: Float, + temperature: Float, + text: String, + tokens: ::Array[Integer] + } end end end diff --git a/sig/openai/models/audio/transcription_text_delta_event.rbs b/sig/openai/models/audio/transcription_text_delta_event.rbs index 09c419d7..155b8e1d 100644 --- a/sig/openai/models/audio/transcription_text_delta_event.rbs +++ b/sig/openai/models/audio/transcription_text_delta_event.rbs @@ -25,6 +25,12 @@ module OpenAI ?type: :"transcript.text.delta" ) -> void + def to_hash: -> { + delta: String, + type: :"transcript.text.delta", + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDeltaEvent::Logprob] + } + type logprob = { token: String, bytes: ::Array[top], logprob: Float } class Logprob < OpenAI::Internal::Type::BaseModel @@ -45,6 +51,8 @@ module OpenAI ?bytes: ::Array[top], ?logprob: Float ) -> void + + def to_hash: -> { token: String, bytes: ::Array[top], logprob: Float } end end end diff --git a/sig/openai/models/audio/transcription_text_done_event.rbs b/sig/openai/models/audio/transcription_text_done_event.rbs index c2fb0bc1..d8f864e3 100644 --- a/sig/openai/models/audio/transcription_text_done_event.rbs +++ b/sig/openai/models/audio/transcription_text_done_event.rbs @@ -25,6 +25,12 @@ module OpenAI ?type: :"transcript.text.done" ) -> void + def to_hash: -> { + text: String, + type: :"transcript.text.done", + logprobs: ::Array[OpenAI::Audio::TranscriptionTextDoneEvent::Logprob] + } + type logprob = { token: String, bytes: ::Array[top], logprob: Float } class Logprob < 
OpenAI::Internal::Type::BaseModel @@ -45,6 +51,8 @@ module OpenAI ?bytes: ::Array[top], ?logprob: Float ) -> void + + def to_hash: -> { token: String, bytes: ::Array[top], logprob: Float } end end end diff --git a/sig/openai/models/audio/transcription_verbose.rbs b/sig/openai/models/audio/transcription_verbose.rbs index 6e40e651..171bfa08 100644 --- a/sig/openai/models/audio/transcription_verbose.rbs +++ b/sig/openai/models/audio/transcription_verbose.rbs @@ -36,6 +36,14 @@ module OpenAI ?segments: ::Array[OpenAI::Audio::TranscriptionSegment], ?words: ::Array[OpenAI::Audio::TranscriptionWord] ) -> void + + def to_hash: -> { + duration: Float, + language: String, + text: String, + segments: ::Array[OpenAI::Audio::TranscriptionSegment], + words: ::Array[OpenAI::Audio::TranscriptionWord] + } end end end diff --git a/sig/openai/models/audio/transcription_word.rbs b/sig/openai/models/audio/transcription_word.rbs index eb48b12d..1bd7d752 100644 --- a/sig/openai/models/audio/transcription_word.rbs +++ b/sig/openai/models/audio/transcription_word.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor word: String def initialize: (end_: Float, start: Float, word: String) -> void + + def to_hash: -> { end_: Float, start: Float, word: String } end end end diff --git a/sig/openai/models/audio/translation.rbs b/sig/openai/models/audio/translation.rbs index cc45a2b2..832a27d3 100644 --- a/sig/openai/models/audio/translation.rbs +++ b/sig/openai/models/audio/translation.rbs @@ -7,6 +7,8 @@ module OpenAI attr_accessor text: String def initialize: (text: String) -> void + + def to_hash: -> { text: String } end end end diff --git a/sig/openai/models/audio/translation_create_params.rbs b/sig/openai/models/audio/translation_create_params.rbs index 5a9c7d5f..da4cbb3c 100644 --- a/sig/openai/models/audio/translation_create_params.rbs +++ b/sig/openai/models/audio/translation_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI module Audio type translation_create_params = { - file: (Pathname | 
StringIO | IO | OpenAI::FilePart), + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranslationCreateParams::model, prompt: String, response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor file: OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::Audio::TranslationCreateParams::model @@ -34,7 +34,7 @@ module OpenAI def temperature=: (Float) -> Float def initialize: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, @@ -42,6 +42,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file: OpenAI::Internal::file_input, + model: OpenAI::Models::Audio::TranslationCreateParams::model, + prompt: String, + response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, + temperature: Float, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::audio_model module Model diff --git a/sig/openai/models/audio/translation_verbose.rbs b/sig/openai/models/audio/translation_verbose.rbs index 20796402..cac25f84 100644 --- a/sig/openai/models/audio/translation_verbose.rbs +++ b/sig/openai/models/audio/translation_verbose.rbs @@ -28,6 +28,13 @@ module OpenAI text: String, ?segments: ::Array[OpenAI::Audio::TranscriptionSegment] ) -> void + + def to_hash: -> { + duration: Float, + language: String, + text: String, + segments: ::Array[OpenAI::Audio::TranscriptionSegment] + } end end end diff --git a/sig/openai/models/auto_file_chunking_strategy_param.rbs b/sig/openai/models/auto_file_chunking_strategy_param.rbs index 
81e99fa2..54aeed24 100644 --- a/sig/openai/models/auto_file_chunking_strategy_param.rbs +++ b/sig/openai/models/auto_file_chunking_strategy_param.rbs @@ -6,6 +6,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end end end diff --git a/sig/openai/models/batch.rbs b/sig/openai/models/batch.rbs index 702f7dff..cdba6cdc 100644 --- a/sig/openai/models/batch.rbs +++ b/sig/openai/models/batch.rbs @@ -8,7 +8,7 @@ module OpenAI endpoint: String, input_file_id: String, object: :batch, - status: OpenAI::Batch::status, + status: OpenAI::Models::Batch::status, cancelled_at: Integer, cancelling_at: Integer, completed_at: Integer, @@ -37,7 +37,7 @@ module OpenAI attr_accessor object: :batch - attr_accessor status: OpenAI::Batch::status + attr_accessor status: OpenAI::Models::Batch::status attr_reader cancelled_at: Integer? @@ -97,7 +97,7 @@ module OpenAI created_at: Integer, endpoint: String, input_file_id: String, - status: OpenAI::Batch::status, + status: OpenAI::Models::Batch::status, ?cancelled_at: Integer, ?cancelling_at: Integer, ?completed_at: Integer, @@ -114,6 +114,29 @@ module OpenAI ?object: :batch ) -> void + def to_hash: -> { + id: String, + completion_window: String, + created_at: Integer, + endpoint: String, + input_file_id: String, + object: :batch, + status: OpenAI::Models::Batch::status, + cancelled_at: Integer, + cancelling_at: Integer, + completed_at: Integer, + error_file_id: String, + errors: OpenAI::Batch::Errors, + expired_at: Integer, + expires_at: Integer, + failed_at: Integer, + finalizing_at: Integer, + in_progress_at: Integer, + metadata: OpenAI::Models::metadata?, + output_file_id: String, + request_counts: OpenAI::BatchRequestCounts + } + type status = :validating | :failed @@ -136,7 +159,7 @@ module OpenAI CANCELLING: :cancelling CANCELLED: :cancelled - def self?.values: -> ::Array[OpenAI::Batch::status] + def self?.values: -> ::Array[OpenAI::Models::Batch::status] end type 
errors = { data: ::Array[OpenAI::BatchError], object: String } @@ -154,6 +177,8 @@ module OpenAI ?data: ::Array[OpenAI::BatchError], ?object: String ) -> void + + def to_hash: -> { data: ::Array[OpenAI::BatchError], object: String } end end end diff --git a/sig/openai/models/batch_cancel_params.rbs b/sig/openai/models/batch_cancel_params.rbs index 9b655647..944d9c29 100644 --- a/sig/openai/models/batch_cancel_params.rbs +++ b/sig/openai/models/batch_cancel_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/batch_create_params.rbs b/sig/openai/models/batch_create_params.rbs index b5519cea..85193e35 100644 --- a/sig/openai/models/batch_create_params.rbs +++ b/sig/openai/models/batch_create_params.rbs @@ -29,6 +29,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + completion_window: OpenAI::Models::BatchCreateParams::completion_window, + endpoint: OpenAI::Models::BatchCreateParams::endpoint, + input_file_id: String, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } + type completion_window = :"24h" module CompletionWindow diff --git a/sig/openai/models/batch_error.rbs b/sig/openai/models/batch_error.rbs index 07c43d31..028bc05a 100644 --- a/sig/openai/models/batch_error.rbs +++ b/sig/openai/models/batch_error.rbs @@ -22,6 +22,13 @@ module OpenAI ?message: String, ?param: String? ) -> void + + def to_hash: -> { + code: String, + line: Integer?, + message: String, + param: String? 
+ } end end end diff --git a/sig/openai/models/batch_list_params.rbs b/sig/openai/models/batch_list_params.rbs index 9f1b2961..59ce8fb7 100644 --- a/sig/openai/models/batch_list_params.rbs +++ b/sig/openai/models/batch_list_params.rbs @@ -21,6 +21,12 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/batch_request_counts.rbs b/sig/openai/models/batch_request_counts.rbs index 38d29256..d8013508 100644 --- a/sig/openai/models/batch_request_counts.rbs +++ b/sig/openai/models/batch_request_counts.rbs @@ -15,6 +15,8 @@ module OpenAI failed: Integer, total: Integer ) -> void + + def to_hash: -> { completed: Integer, failed: Integer, total: Integer } end end end diff --git a/sig/openai/models/batch_retrieve_params.rbs b/sig/openai/models/batch_retrieve_params.rbs index b1deb5c3..232d9ed7 100644 --- a/sig/openai/models/batch_retrieve_params.rbs +++ b/sig/openai/models/batch_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/assistant.rbs b/sig/openai/models/beta/assistant.rbs index 867ab8e5..3128e9ba 100644 --- a/sig/openai/models/beta/assistant.rbs +++ b/sig/openai/models/beta/assistant.rbs @@ -61,6 +61,22 @@ module OpenAI ?object: :assistant ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + description: String?, + instructions: String?, + metadata: OpenAI::Models::metadata?, + model: String, + name: String?, + object: :assistant, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_resources: OpenAI::Beta::Assistant::ToolResources?, + top_p: Float? 
+ } + type tool_resources = { code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, @@ -85,6 +101,11 @@ module OpenAI ?file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::Assistant::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::Assistant::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -93,6 +114,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -103,6 +126,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/assistant_create_params.rbs b/sig/openai/models/beta/assistant_create_params.rbs index 574c22a2..f4bdd80f 100644 --- a/sig/openai/models/beta/assistant_create_params.rbs +++ b/sig/openai/models/beta/assistant_create_params.rbs @@ -62,6 +62,21 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + model: OpenAI::Models::Beta::AssistantCreateParams::model, + description: String?, + instructions: String?, + metadata: OpenAI::Models::metadata?, + name: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_resources: OpenAI::Beta::AssistantCreateParams::ToolResources?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + top_p: Float?, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::chat_model module Model @@ -94,6 +109,11 @@ module OpenAI ?file_search: 
OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -102,6 +122,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = @@ -126,19 +148,24 @@ module OpenAI ?vector_stores: ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] ) -> void + def to_hash: -> { + vector_store_ids: ::Array[String], + vector_stores: ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] + } + type vector_store = { - chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: ::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? 
def chunking_strategy=: ( - OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? @@ -147,11 +174,17 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + chunking_strategy: OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata? 
+ } + type chunking_strategy = OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static @@ -165,6 +198,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end type static = @@ -183,6 +218,11 @@ module OpenAI ?type: :static ) -> void + def to_hash: -> { + static: OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: :static + } + type static = { chunk_overlap_tokens: Integer, @@ -198,10 +238,15 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end - def self?.variants: -> ::Array[OpenAI::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] + def self?.variants: -> ::Array[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end diff --git a/sig/openai/models/beta/assistant_delete_params.rbs b/sig/openai/models/beta/assistant_delete_params.rbs index e429cf3e..e476dced 100644 --- a/sig/openai/models/beta/assistant_delete_params.rbs +++ b/sig/openai/models/beta/assistant_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/assistant_deleted.rbs b/sig/openai/models/beta/assistant_deleted.rbs index 5ea06673..f5fe869a 100644 --- a/sig/openai/models/beta/assistant_deleted.rbs +++ b/sig/openai/models/beta/assistant_deleted.rbs @@ -16,6 +16,12 @@ module OpenAI deleted: bool, ?object: :"assistant.deleted" ) -> void + + def to_hash: -> { + id: String, + 
deleted: bool, + object: :"assistant.deleted" + } end end end diff --git a/sig/openai/models/beta/assistant_list_params.rbs b/sig/openai/models/beta/assistant_list_params.rbs index 252e6b46..d9fea65e 100644 --- a/sig/openai/models/beta/assistant_list_params.rbs +++ b/sig/openai/models/beta/assistant_list_params.rbs @@ -40,6 +40,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + limit: Integer, + order: OpenAI::Models::Beta::AssistantListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/assistant_retrieve_params.rbs b/sig/openai/models/beta/assistant_retrieve_params.rbs index a1fec037..be66a758 100644 --- a/sig/openai/models/beta/assistant_retrieve_params.rbs +++ b/sig/openai/models/beta/assistant_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/assistant_stream_event.rbs b/sig/openai/models/beta/assistant_stream_event.rbs index e0797c37..0852c3c3 100644 --- a/sig/openai/models/beta/assistant_stream_event.rbs +++ b/sig/openai/models/beta/assistant_stream_event.rbs @@ -51,6 +51,12 @@ module OpenAI ?enabled: bool, ?event: :"thread.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Thread, + event: :"thread.created", + enabled: bool + } end type thread_run_created = @@ -65,6 +71,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.created" + } end type thread_run_queued = @@ -79,6 +90,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.queued" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: 
:"thread.run.queued" + } end type thread_run_in_progress = @@ -93,6 +109,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.in_progress" + } end type thread_run_requires_action = @@ -110,6 +131,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.requires_action" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.requires_action" + } end type thread_run_completed = @@ -124,6 +150,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.completed" + } end type thread_run_incomplete = @@ -138,6 +169,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.incomplete" + } end type thread_run_failed = @@ -152,6 +188,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.failed" + } end type thread_run_cancelling = @@ -166,6 +207,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelling" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelling" + } end type thread_run_cancelled = @@ -180,6 +226,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelled" + } end type thread_run_expired = @@ -194,6 +245,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.expired" + } end type thread_run_step_created = @@ -211,6 +267,11 @@ 
module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.created" + } end type thread_run_step_in_progress = @@ -228,6 +289,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.in_progress" + } end type thread_run_step_delta = @@ -245,6 +311,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, ?event: :"thread.run.step.delta" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, + event: :"thread.run.step.delta" + } end type thread_run_step_completed = @@ -262,6 +333,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.completed" + } end type thread_run_step_failed = @@ -279,6 +355,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.failed" + } end type thread_run_step_cancelled = @@ -296,6 +377,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.cancelled" + } end type thread_run_step_expired = @@ -313,6 +399,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.expired" + } end type thread_message_created = @@ -330,6 +421,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.created" ) -> void 
+ + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.created" + } end type thread_message_in_progress = @@ -347,6 +443,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.in_progress" + } end type thread_message_delta = @@ -364,6 +465,11 @@ module OpenAI data: OpenAI::Beta::Threads::MessageDeltaEvent, ?event: :"thread.message.delta" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::MessageDeltaEvent, + event: :"thread.message.delta" + } end type thread_message_completed = @@ -381,6 +487,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.completed" + } end type thread_message_incomplete = @@ -398,6 +509,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.incomplete" + } end type error_event = { data: OpenAI::ErrorObject, event: :error } @@ -408,6 +524,8 @@ module OpenAI attr_accessor event: :error def initialize: (data: OpenAI::ErrorObject, ?event: :error) -> void + + def to_hash: -> { data: OpenAI::ErrorObject, event: :error } end def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/models/beta/assistant_tool_choice.rbs b/sig/openai/models/beta/assistant_tool_choice.rbs index 526b290d..1d80009b 100644 --- a/sig/openai/models/beta/assistant_tool_choice.rbs +++ b/sig/openai/models/beta/assistant_tool_choice.rbs @@ -3,12 +3,12 @@ module OpenAI module Beta type assistant_tool_choice = { - type: OpenAI::Beta::AssistantToolChoice::type_, + type: OpenAI::Models::Beta::AssistantToolChoice::type_, function: OpenAI::Beta::AssistantToolChoiceFunction } class 
AssistantToolChoice < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::AssistantToolChoice::type_ + attr_accessor type: OpenAI::Models::Beta::AssistantToolChoice::type_ attr_reader function: OpenAI::Beta::AssistantToolChoiceFunction? @@ -17,10 +17,15 @@ module OpenAI ) -> OpenAI::Beta::AssistantToolChoiceFunction def initialize: ( - type: OpenAI::Beta::AssistantToolChoice::type_, + type: OpenAI::Models::Beta::AssistantToolChoice::type_, ?function: OpenAI::Beta::AssistantToolChoiceFunction ) -> void + def to_hash: -> { + type: OpenAI::Models::Beta::AssistantToolChoice::type_, + function: OpenAI::Beta::AssistantToolChoiceFunction + } + type type_ = :function | :code_interpreter | :file_search module Type @@ -30,7 +35,7 @@ module OpenAI CODE_INTERPRETER: :code_interpreter FILE_SEARCH: :file_search - def self?.values: -> ::Array[OpenAI::Beta::AssistantToolChoice::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoice::type_] end end end diff --git a/sig/openai/models/beta/assistant_tool_choice_function.rbs b/sig/openai/models/beta/assistant_tool_choice_function.rbs index 36f0983c..b97b9891 100644 --- a/sig/openai/models/beta/assistant_tool_choice_function.rbs +++ b/sig/openai/models/beta/assistant_tool_choice_function.rbs @@ -7,6 +7,8 @@ module OpenAI attr_accessor name: String def initialize: (name: String) -> void + + def to_hash: -> { name: String } end end end diff --git a/sig/openai/models/beta/assistant_tool_choice_option.rbs b/sig/openai/models/beta/assistant_tool_choice_option.rbs index ddd66365..e879d0d5 100644 --- a/sig/openai/models/beta/assistant_tool_choice_option.rbs +++ b/sig/openai/models/beta/assistant_tool_choice_option.rbs @@ -2,7 +2,7 @@ module OpenAI module Models module Beta type assistant_tool_choice_option = - OpenAI::Beta::AssistantToolChoiceOption::auto + OpenAI::Models::Beta::AssistantToolChoiceOption::auto | OpenAI::Beta::AssistantToolChoice module AssistantToolChoiceOption @@ -17,7 +17,7 @@ 
module OpenAI AUTO: :auto REQUIRED: :required - def self?.values: -> ::Array[OpenAI::Beta::AssistantToolChoiceOption::auto] + def self?.values: -> ::Array[OpenAI::Models::Beta::AssistantToolChoiceOption::auto] end def self?.variants: -> ::Array[OpenAI::Models::Beta::assistant_tool_choice_option] diff --git a/sig/openai/models/beta/assistant_update_params.rbs b/sig/openai/models/beta/assistant_update_params.rbs index de493080..6ee6405d 100644 --- a/sig/openai/models/beta/assistant_update_params.rbs +++ b/sig/openai/models/beta/assistant_update_params.rbs @@ -66,6 +66,21 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + description: String?, + instructions: String?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::Beta::AssistantUpdateParams::model, + name: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_resources: OpenAI::Beta::AssistantUpdateParams::ToolResources?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + top_p: Float?, + request_options: OpenAI::RequestOptions + } + type model = String | :"gpt-4.1" @@ -172,6 +187,11 @@ module OpenAI ?file_search: OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::AssistantUpdateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -180,6 +200,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -190,6 +212,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: 
(?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/code_interpreter_tool.rbs b/sig/openai/models/beta/code_interpreter_tool.rbs index 84b353d0..bd9a6760 100644 --- a/sig/openai/models/beta/code_interpreter_tool.rbs +++ b/sig/openai/models/beta/code_interpreter_tool.rbs @@ -7,6 +7,8 @@ module OpenAI attr_accessor type: :code_interpreter def initialize: (?type: :code_interpreter) -> void + + def to_hash: -> { type: :code_interpreter } end end end diff --git a/sig/openai/models/beta/file_search_tool.rbs b/sig/openai/models/beta/file_search_tool.rbs index 9e544ea9..85f54d10 100644 --- a/sig/openai/models/beta/file_search_tool.rbs +++ b/sig/openai/models/beta/file_search_tool.rbs @@ -21,6 +21,11 @@ module OpenAI ?type: :file_search ) -> void + def to_hash: -> { + type: :file_search, + file_search: OpenAI::Beta::FileSearchTool::FileSearch + } + type file_search = { max_num_results: Integer, @@ -43,26 +48,36 @@ module OpenAI ?ranking_options: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions ) -> void + def to_hash: -> { + max_num_results: Integer, + ranking_options: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions + } + type ranking_options = { score_threshold: Float, - ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker } class RankingOptions < OpenAI::Internal::Type::BaseModel attr_accessor score_threshold: Float - attr_reader ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker? + attr_reader ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker? 
def ranker=: ( - OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker - ) -> OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + ) -> OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker def initialize: ( score_threshold: Float, - ?ranker: OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + ?ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker ) -> void + def to_hash: -> { + score_threshold: Float, + ranker: OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker + } + type ranker = :auto | :default_2024_08_21 module Ranker @@ -71,7 +86,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 - def self?.values: -> ::Array[OpenAI::Beta::FileSearchTool::FileSearch::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::ranker] end end end diff --git a/sig/openai/models/beta/function_tool.rbs b/sig/openai/models/beta/function_tool.rbs index d93fda7c..8798cc58 100644 --- a/sig/openai/models/beta/function_tool.rbs +++ b/sig/openai/models/beta/function_tool.rbs @@ -13,6 +13,11 @@ module OpenAI function: OpenAI::FunctionDefinition, ?type: :function ) -> void + + def to_hash: -> { + function: OpenAI::FunctionDefinition, + type: :function + } end end end diff --git a/sig/openai/models/beta/message_stream_event.rbs b/sig/openai/models/beta/message_stream_event.rbs index e13d605b..eb4aed3e 100644 --- a/sig/openai/models/beta/message_stream_event.rbs +++ b/sig/openai/models/beta/message_stream_event.rbs @@ -26,6 +26,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.created" + } end type thread_message_in_progress = @@ -43,6 +48,11 @@ module OpenAI data: 
OpenAI::Beta::Threads::Message, ?event: :"thread.message.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.in_progress" + } end type thread_message_delta = @@ -60,6 +70,11 @@ module OpenAI data: OpenAI::Beta::Threads::MessageDeltaEvent, ?event: :"thread.message.delta" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::MessageDeltaEvent, + event: :"thread.message.delta" + } end type thread_message_completed = @@ -77,6 +92,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.completed" + } end type thread_message_incomplete = @@ -94,6 +114,11 @@ module OpenAI data: OpenAI::Beta::Threads::Message, ?event: :"thread.message.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Message, + event: :"thread.message.incomplete" + } end def self?.variants: -> ::Array[OpenAI::Models::Beta::message_stream_event] diff --git a/sig/openai/models/beta/run_step_stream_event.rbs b/sig/openai/models/beta/run_step_stream_event.rbs index 3b292e7c..cf3454d9 100644 --- a/sig/openai/models/beta/run_step_stream_event.rbs +++ b/sig/openai/models/beta/run_step_stream_event.rbs @@ -28,6 +28,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.created" + } end type thread_run_step_in_progress = @@ -45,6 +50,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.in_progress" + } end type thread_run_step_delta = @@ -62,6 +72,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, ?event: :"thread.run.step.delta" ) -> void + + def to_hash: -> { + data: 
OpenAI::Beta::Threads::Runs::RunStepDeltaEvent, + event: :"thread.run.step.delta" + } end type thread_run_step_completed = @@ -79,6 +94,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.completed" + } end type thread_run_step_failed = @@ -96,6 +116,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.failed" + } end type thread_run_step_cancelled = @@ -113,6 +138,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.cancelled" + } end type thread_run_step_expired = @@ -130,6 +160,11 @@ module OpenAI data: OpenAI::Beta::Threads::Runs::RunStep, ?event: :"thread.run.step.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Runs::RunStep, + event: :"thread.run.step.expired" + } end def self?.variants: -> ::Array[OpenAI::Models::Beta::run_step_stream_event] diff --git a/sig/openai/models/beta/run_stream_event.rbs b/sig/openai/models/beta/run_stream_event.rbs index e1c4d276..4bffb3ac 100644 --- a/sig/openai/models/beta/run_stream_event.rbs +++ b/sig/openai/models/beta/run_stream_event.rbs @@ -28,6 +28,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.created" + } end type thread_run_queued = @@ -42,6 +47,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.queued" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.queued" + } end type thread_run_in_progress = @@ -56,6 +66,11 @@ module OpenAI data: 
OpenAI::Beta::Threads::Run, ?event: :"thread.run.in_progress" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.in_progress" + } end type thread_run_requires_action = @@ -73,6 +88,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.requires_action" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.requires_action" + } end type thread_run_completed = @@ -87,6 +107,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.completed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.completed" + } end type thread_run_incomplete = @@ -101,6 +126,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.incomplete" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.incomplete" + } end type thread_run_failed = @@ -115,6 +145,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.failed" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.failed" + } end type thread_run_cancelling = @@ -129,6 +164,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelling" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelling" + } end type thread_run_cancelled = @@ -143,6 +183,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.cancelled" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.cancelled" + } end type thread_run_expired = @@ -157,6 +202,11 @@ module OpenAI data: OpenAI::Beta::Threads::Run, ?event: :"thread.run.expired" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Threads::Run, + event: :"thread.run.expired" + } end def self?.variants: -> ::Array[OpenAI::Models::Beta::run_stream_event] diff --git a/sig/openai/models/beta/thread.rbs b/sig/openai/models/beta/thread.rbs 
index c934342f..98eb5490 100644 --- a/sig/openai/models/beta/thread.rbs +++ b/sig/openai/models/beta/thread.rbs @@ -29,6 +29,14 @@ module OpenAI ?object: :thread ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + metadata: OpenAI::Models::metadata?, + object: :thread, + tool_resources: OpenAI::Beta::Thread::ToolResources? + } + type tool_resources = { code_interpreter: OpenAI::Beta::Thread::ToolResources::CodeInterpreter, @@ -53,6 +61,11 @@ module OpenAI ?file_search: OpenAI::Beta::Thread::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::Thread::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::Thread::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -61,6 +74,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -71,6 +86,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/thread_create_and_run_params.rbs b/sig/openai/models/beta/thread_create_and_run_params.rbs index 406904b2..4dd4c103 100644 --- a/sig/openai/models/beta/thread_create_and_run_params.rbs +++ b/sig/openai/models/beta/thread_create_and_run_params.rbs @@ -17,7 +17,7 @@ module OpenAI tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, - truncation_strategy: OpenAI::Beta::TruncationObject? + truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy? } & OpenAI::Internal::Type::request_parameters @@ -59,7 +59,7 @@ module OpenAI attr_accessor top_p: Float? 
- attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? + attr_accessor truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy? def initialize: ( assistant_id: String, @@ -76,10 +76,29 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + assistant_id: String, + instructions: String?, + max_completion_tokens: Integer?, + max_prompt_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::Beta::ThreadCreateAndRunParams::model?, + parallel_tool_calls: bool, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + thread: OpenAI::Beta::ThreadCreateAndRunParams::Thread, + tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, + top_p: Float?, + truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::chat_model module Model @@ -112,30 +131,43 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources? ) -> void + def to_hash: -> { + messages: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message], + metadata: OpenAI::Models::metadata?, + tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources? 
+ } + type message = { - content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content, - role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role, + content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, metadata: OpenAI::Models::metadata? } class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content + attr_accessor content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content - attr_accessor role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role + attr_accessor role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role attr_accessor attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - content: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content, - role: OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role, + content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, ?attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content, + role: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role, + attachments: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment]?, + metadata: OpenAI::Models::metadata? 
+ } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -143,7 +175,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::content] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -156,13 +188,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -170,17 +202,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool]? 
def tools=: ( - ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] - ) -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch @@ -194,9 +231,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::tool] end end end @@ -225,6 +264,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -233,6 +277,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = @@ -257,19 +303,24 @@ module 
OpenAI ?vector_stores: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] ) -> void + def to_hash: -> { + vector_store_ids: ::Array[String], + vector_stores: ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] + } + type vector_store = { - chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: ::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy? def chunking_strategy=: ( - OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? @@ -278,11 +329,17 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? 
) -> void + def to_hash: -> { + chunking_strategy: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata? + } + type chunking_strategy = OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static @@ -296,6 +353,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end type static = @@ -314,6 +373,11 @@ module OpenAI ?type: :static ) -> void + def to_hash: -> { + static: OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: :static + } + type static = { chunk_overlap_tokens: Integer, @@ -329,10 +393,15 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end @@ -363,6 +432,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -371,6 +445,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) 
-> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -381,6 +457,41 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } + end + end + + type truncation_strategy = + { + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + last_messages: Integer? + } + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_ + + attr_accessor last_messages: Integer? + + def initialize: ( + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + ?last_messages: Integer? + ) -> void + + def to_hash: -> { + type: OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_, + last_messages: Integer? + } + + type type_ = :auto | :last_messages + + module Type + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LAST_MESSAGES: :last_messages + + def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::type_] end end end diff --git a/sig/openai/models/beta/thread_create_params.rbs b/sig/openai/models/beta/thread_create_params.rbs index d3593936..e8d03f9c 100644 --- a/sig/openai/models/beta/thread_create_params.rbs +++ b/sig/openai/models/beta/thread_create_params.rbs @@ -30,30 +30,44 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + messages: ::Array[OpenAI::Beta::ThreadCreateParams::Message], + metadata: OpenAI::Models::metadata?, + tool_resources: OpenAI::Beta::ThreadCreateParams::ToolResources?, + request_options: OpenAI::RequestOptions + } + type message = { - content: OpenAI::Beta::ThreadCreateParams::Message::content, - role: OpenAI::Beta::ThreadCreateParams::Message::role, + content: 
OpenAI::Models::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, metadata: OpenAI::Models::metadata? } class Message < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Beta::ThreadCreateParams::Message::content + attr_accessor content: OpenAI::Models::Beta::ThreadCreateParams::Message::content - attr_accessor role: OpenAI::Beta::ThreadCreateParams::Message::role + attr_accessor role: OpenAI::Models::Beta::ThreadCreateParams::Message::role attr_accessor attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - content: OpenAI::Beta::ThreadCreateParams::Message::content, - role: OpenAI::Beta::ThreadCreateParams::Message::role, + content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, ?attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::ThreadCreateParams::Message::content, + role: OpenAI::Models::Beta::ThreadCreateParams::Message::role, + attachments: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment]?, + metadata: OpenAI::Models::metadata? 
+ } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -61,7 +75,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::content] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -74,13 +88,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -88,17 +102,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool]? 
def tools=: ( - ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] - ) -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch @@ -112,9 +131,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::Message::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::tool] end end end @@ -143,6 +164,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -151,6 +177,8 @@ module OpenAI def file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = @@ -175,19 +203,24 @@ module OpenAI ?vector_stores: ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] ) -> void + def to_hash: -> { + vector_store_ids: ::Array[String], + 
vector_stores: ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] + } + type vector_store = { - chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, file_ids: ::Array[String], metadata: OpenAI::Models::metadata? } class VectorStore < OpenAI::Internal::Type::BaseModel - attr_reader chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? + attr_reader chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy? def chunking_strategy=: ( - OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy - ) -> OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy + ) -> OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy attr_reader file_ids: ::Array[String]? @@ -196,11 +229,17 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - ?chunking_strategy: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + ?chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + chunking_strategy: OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata? 
+ } + type chunking_strategy = OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto | OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static @@ -214,6 +253,8 @@ module OpenAI attr_accessor type: :auto def initialize: (?type: :auto) -> void + + def to_hash: -> { type: :auto } end type static = @@ -232,6 +273,11 @@ module OpenAI ?type: :static ) -> void + def to_hash: -> { + static: OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static, + type: :static + } + type static = { chunk_overlap_tokens: Integer, @@ -247,10 +293,15 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end - def self?.variants: -> ::Array[OpenAI::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] + def self?.variants: -> ::Array[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::chunking_strategy] end end end diff --git a/sig/openai/models/beta/thread_delete_params.rbs b/sig/openai/models/beta/thread_delete_params.rbs index fa242461..c33f5354 100644 --- a/sig/openai/models/beta/thread_delete_params.rbs +++ b/sig/openai/models/beta/thread_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/thread_deleted.rbs b/sig/openai/models/beta/thread_deleted.rbs index 9ca391a5..c43c132b 100644 --- a/sig/openai/models/beta/thread_deleted.rbs +++ b/sig/openai/models/beta/thread_deleted.rbs @@ -16,6 +16,8 @@ module OpenAI deleted: bool, ?object: :"thread.deleted" ) -> void + + def to_hash: -> { id: String, deleted: bool, object: :"thread.deleted" } end end 
end diff --git a/sig/openai/models/beta/thread_retrieve_params.rbs b/sig/openai/models/beta/thread_retrieve_params.rbs index 90b81360..dd6b78cf 100644 --- a/sig/openai/models/beta/thread_retrieve_params.rbs +++ b/sig/openai/models/beta/thread_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/beta/thread_stream_event.rbs b/sig/openai/models/beta/thread_stream_event.rbs index 01af4a19..9c8af3b5 100644 --- a/sig/openai/models/beta/thread_stream_event.rbs +++ b/sig/openai/models/beta/thread_stream_event.rbs @@ -18,6 +18,12 @@ module OpenAI ?enabled: bool, ?event: :"thread.created" ) -> void + + def to_hash: -> { + data: OpenAI::Beta::Thread, + event: :"thread.created", + enabled: bool + } end end end diff --git a/sig/openai/models/beta/thread_update_params.rbs b/sig/openai/models/beta/thread_update_params.rbs index 98253f4e..d5d3375e 100644 --- a/sig/openai/models/beta/thread_update_params.rbs +++ b/sig/openai/models/beta/thread_update_params.rbs @@ -22,6 +22,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + metadata: OpenAI::Models::metadata?, + tool_resources: OpenAI::Beta::ThreadUpdateParams::ToolResources?, + request_options: OpenAI::RequestOptions + } + type tool_resources = { code_interpreter: OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, @@ -46,6 +52,11 @@ module OpenAI ?file_search: OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch ) -> void + def to_hash: -> { + code_interpreter: OpenAI::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, + file_search: OpenAI::Beta::ThreadUpdateParams::ToolResources::FileSearch + } + type code_interpreter = { file_ids: ::Array[String] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -54,6 +65,8 @@ module OpenAI def 
file_ids=: (::Array[String]) -> ::Array[String] def initialize: (?file_ids: ::Array[String]) -> void + + def to_hash: -> { file_ids: ::Array[String] } end type file_search = { vector_store_ids: ::Array[String] } @@ -64,6 +77,8 @@ module OpenAI def vector_store_ids=: (::Array[String]) -> ::Array[String] def initialize: (?vector_store_ids: ::Array[String]) -> void + + def to_hash: -> { vector_store_ids: ::Array[String] } end end end diff --git a/sig/openai/models/beta/threads/file_citation_annotation.rbs b/sig/openai/models/beta/threads/file_citation_annotation.rbs index 424350aa..9b224788 100644 --- a/sig/openai/models/beta/threads/file_citation_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_annotation.rbs @@ -30,12 +30,22 @@ module OpenAI ?type: :file_citation ) -> void + def to_hash: -> { + end_index: Integer, + file_citation: OpenAI::Beta::Threads::FileCitationAnnotation::FileCitation, + start_index: Integer, + text: String, + type: :file_citation + } + type file_citation = { file_id: String } class FileCitation < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs index ba8e1399..8bb06279 100644 --- a/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_citation_delta_annotation.rbs @@ -44,6 +44,15 @@ module OpenAI ?type: :file_citation ) -> void + def to_hash: -> { + index: Integer, + type: :file_citation, + end_index: Integer, + file_citation: OpenAI::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, + start_index: Integer, + text: String + } + type file_citation = { file_id: String, quote: String } class FileCitation < OpenAI::Internal::Type::BaseModel @@ -56,6 +65,8 @@ module OpenAI def quote=: (String) -> String def initialize: 
(?file_id: String, ?quote: String) -> void + + def to_hash: -> { file_id: String, quote: String } end end end diff --git a/sig/openai/models/beta/threads/file_path_annotation.rbs b/sig/openai/models/beta/threads/file_path_annotation.rbs index 95a8cd5d..95d59fc3 100644 --- a/sig/openai/models/beta/threads/file_path_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_annotation.rbs @@ -30,12 +30,22 @@ module OpenAI ?type: :file_path ) -> void + def to_hash: -> { + end_index: Integer, + file_path: OpenAI::Beta::Threads::FilePathAnnotation::FilePath, + start_index: Integer, + text: String, + type: :file_path + } + type file_path = { file_id: String } class FilePath < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs index c75d696a..8bac30d8 100644 --- a/sig/openai/models/beta/threads/file_path_delta_annotation.rbs +++ b/sig/openai/models/beta/threads/file_path_delta_annotation.rbs @@ -44,6 +44,15 @@ module OpenAI ?type: :file_path ) -> void + def to_hash: -> { + index: Integer, + type: :file_path, + end_index: Integer, + file_path: OpenAI::Beta::Threads::FilePathDeltaAnnotation::FilePath, + start_index: Integer, + text: String + } + type file_path = { file_id: String } class FilePath < OpenAI::Internal::Type::BaseModel @@ -52,6 +61,8 @@ module OpenAI def file_id=: (String) -> String def initialize: (?file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/image_file.rbs b/sig/openai/models/beta/threads/image_file.rbs index 38e77c68..6b59968c 100644 --- a/sig/openai/models/beta/threads/image_file.rbs +++ b/sig/openai/models/beta/threads/image_file.rbs @@ -3,22 +3,30 @@ module OpenAI module Beta module Threads type image_file = - { file_id: String, 
detail: OpenAI::Beta::Threads::ImageFile::detail } + { + file_id: String, + detail: OpenAI::Models::Beta::Threads::ImageFile::detail + } class ImageFile < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String - attr_reader detail: OpenAI::Beta::Threads::ImageFile::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageFile::detail? def detail=: ( - OpenAI::Beta::Threads::ImageFile::detail - ) -> OpenAI::Beta::Threads::ImageFile::detail + OpenAI::Models::Beta::Threads::ImageFile::detail + ) -> OpenAI::Models::Beta::Threads::ImageFile::detail def initialize: ( file_id: String, - ?detail: OpenAI::Beta::Threads::ImageFile::detail + ?detail: OpenAI::Models::Beta::Threads::ImageFile::detail ) -> void + def to_hash: -> { + file_id: String, + detail: OpenAI::Models::Beta::Threads::ImageFile::detail + } + type detail = :auto | :low | :high module Detail @@ -28,7 +36,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageFile::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFile::detail] end end end diff --git a/sig/openai/models/beta/threads/image_file_content_block.rbs b/sig/openai/models/beta/threads/image_file_content_block.rbs index 103b48ac..5b91259d 100644 --- a/sig/openai/models/beta/threads/image_file_content_block.rbs +++ b/sig/openai/models/beta/threads/image_file_content_block.rbs @@ -14,6 +14,11 @@ module OpenAI image_file: OpenAI::Beta::Threads::ImageFile, ?type: :image_file ) -> void + + def to_hash: -> { + image_file: OpenAI::Beta::Threads::ImageFile, + type: :image_file + } end end end diff --git a/sig/openai/models/beta/threads/image_file_delta.rbs b/sig/openai/models/beta/threads/image_file_delta.rbs index 5ed4a435..2f0784dd 100644 --- a/sig/openai/models/beta/threads/image_file_delta.rbs +++ b/sig/openai/models/beta/threads/image_file_delta.rbs @@ -4,26 +4,31 @@ module OpenAI module Threads type image_file_delta = { - detail: 
OpenAI::Beta::Threads::ImageFileDelta::detail, + detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, file_id: String } class ImageFileDelta < OpenAI::Internal::Type::BaseModel - attr_reader detail: OpenAI::Beta::Threads::ImageFileDelta::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail? def detail=: ( - OpenAI::Beta::Threads::ImageFileDelta::detail - ) -> OpenAI::Beta::Threads::ImageFileDelta::detail + OpenAI::Models::Beta::Threads::ImageFileDelta::detail + ) -> OpenAI::Models::Beta::Threads::ImageFileDelta::detail attr_reader file_id: String? def file_id=: (String) -> String def initialize: ( - ?detail: OpenAI::Beta::Threads::ImageFileDelta::detail, + ?detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, ?file_id: String ) -> void + def to_hash: -> { + detail: OpenAI::Models::Beta::Threads::ImageFileDelta::detail, + file_id: String + } + type detail = :auto | :low | :high module Detail @@ -33,7 +38,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageFileDelta::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageFileDelta::detail] end end end diff --git a/sig/openai/models/beta/threads/image_file_delta_block.rbs b/sig/openai/models/beta/threads/image_file_delta_block.rbs index 7aca7085..e753fcc9 100644 --- a/sig/openai/models/beta/threads/image_file_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_file_delta_block.rbs @@ -25,6 +25,12 @@ module OpenAI ?image_file: OpenAI::Beta::Threads::ImageFileDelta, ?type: :image_file ) -> void + + def to_hash: -> { + index: Integer, + type: :image_file, + image_file: OpenAI::Beta::Threads::ImageFileDelta + } end end end diff --git a/sig/openai/models/beta/threads/image_url.rbs b/sig/openai/models/beta/threads/image_url.rbs index 8808afc1..d0a0d066 100644 --- a/sig/openai/models/beta/threads/image_url.rbs +++ b/sig/openai/models/beta/threads/image_url.rbs @@ -3,22 +3,30 @@ module OpenAI module 
Beta module Threads type image_url = - { url: String, detail: OpenAI::Beta::Threads::ImageURL::detail } + { + url: String, + detail: OpenAI::Models::Beta::Threads::ImageURL::detail + } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String - attr_reader detail: OpenAI::Beta::Threads::ImageURL::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageURL::detail? def detail=: ( - OpenAI::Beta::Threads::ImageURL::detail - ) -> OpenAI::Beta::Threads::ImageURL::detail + OpenAI::Models::Beta::Threads::ImageURL::detail + ) -> OpenAI::Models::Beta::Threads::ImageURL::detail def initialize: ( url: String, - ?detail: OpenAI::Beta::Threads::ImageURL::detail + ?detail: OpenAI::Models::Beta::Threads::ImageURL::detail ) -> void + def to_hash: -> { + url: String, + detail: OpenAI::Models::Beta::Threads::ImageURL::detail + } + type detail = :auto | :low | :high module Detail @@ -28,7 +36,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageURL::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURL::detail] end end end diff --git a/sig/openai/models/beta/threads/image_url_content_block.rbs b/sig/openai/models/beta/threads/image_url_content_block.rbs index 445e5239..8cc69592 100644 --- a/sig/openai/models/beta/threads/image_url_content_block.rbs +++ b/sig/openai/models/beta/threads/image_url_content_block.rbs @@ -14,6 +14,11 @@ module OpenAI image_url: OpenAI::Beta::Threads::ImageURL, ?type: :image_url ) -> void + + def to_hash: -> { + image_url: OpenAI::Beta::Threads::ImageURL, + type: :image_url + } end end end diff --git a/sig/openai/models/beta/threads/image_url_delta.rbs b/sig/openai/models/beta/threads/image_url_delta.rbs index 54d6425a..2c0721e9 100644 --- a/sig/openai/models/beta/threads/image_url_delta.rbs +++ b/sig/openai/models/beta/threads/image_url_delta.rbs @@ -3,24 +3,32 @@ module OpenAI module Beta module Threads type image_url_delta = - { detail: 
OpenAI::Beta::Threads::ImageURLDelta::detail, url: String } + { + detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, + url: String + } class ImageURLDelta < OpenAI::Internal::Type::BaseModel - attr_reader detail: OpenAI::Beta::Threads::ImageURLDelta::detail? + attr_reader detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail? def detail=: ( - OpenAI::Beta::Threads::ImageURLDelta::detail - ) -> OpenAI::Beta::Threads::ImageURLDelta::detail + OpenAI::Models::Beta::Threads::ImageURLDelta::detail + ) -> OpenAI::Models::Beta::Threads::ImageURLDelta::detail attr_reader url: String? def url=: (String) -> String def initialize: ( - ?detail: OpenAI::Beta::Threads::ImageURLDelta::detail, + ?detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, ?url: String ) -> void + def to_hash: -> { + detail: OpenAI::Models::Beta::Threads::ImageURLDelta::detail, + url: String + } + type detail = :auto | :low | :high module Detail @@ -30,7 +38,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Beta::Threads::ImageURLDelta::detail] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::ImageURLDelta::detail] end end end diff --git a/sig/openai/models/beta/threads/image_url_delta_block.rbs b/sig/openai/models/beta/threads/image_url_delta_block.rbs index d3375983..a0fc6366 100644 --- a/sig/openai/models/beta/threads/image_url_delta_block.rbs +++ b/sig/openai/models/beta/threads/image_url_delta_block.rbs @@ -25,6 +25,12 @@ module OpenAI ?image_url: OpenAI::Beta::Threads::ImageURLDelta, ?type: :image_url ) -> void + + def to_hash: -> { + index: Integer, + type: :image_url, + image_url: OpenAI::Beta::Threads::ImageURLDelta + } end end end diff --git a/sig/openai/models/beta/threads/message.rbs b/sig/openai/models/beta/threads/message.rbs index 10c9bca6..0aa6f377 100644 --- a/sig/openai/models/beta/threads/message.rbs +++ b/sig/openai/models/beta/threads/message.rbs @@ -14,9 +14,9 @@ module OpenAI incomplete_details: 
OpenAI::Beta::Threads::Message::IncompleteDetails?, metadata: OpenAI::Models::metadata?, object: :"thread.message", - role: OpenAI::Beta::Threads::Message::role, + role: OpenAI::Models::Beta::Threads::Message::role, run_id: String?, - status: OpenAI::Beta::Threads::Message::status, + status: OpenAI::Models::Beta::Threads::Message::status, thread_id: String } @@ -41,11 +41,11 @@ module OpenAI attr_accessor object: :"thread.message" - attr_accessor role: OpenAI::Beta::Threads::Message::role + attr_accessor role: OpenAI::Models::Beta::Threads::Message::role attr_accessor run_id: String? - attr_accessor status: OpenAI::Beta::Threads::Message::status + attr_accessor status: OpenAI::Models::Beta::Threads::Message::status attr_accessor thread_id: String @@ -59,17 +59,34 @@ module OpenAI incomplete_at: Integer?, incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails?, metadata: OpenAI::Models::metadata?, - role: OpenAI::Beta::Threads::Message::role, + role: OpenAI::Models::Beta::Threads::Message::role, run_id: String?, - status: OpenAI::Beta::Threads::Message::status, + status: OpenAI::Models::Beta::Threads::Message::status, thread_id: String, ?object: :"thread.message" ) -> void + def to_hash: -> { + id: String, + assistant_id: String?, + attachments: ::Array[OpenAI::Beta::Threads::Message::Attachment]?, + completed_at: Integer?, + content: ::Array[OpenAI::Models::Beta::Threads::message_content], + created_at: Integer, + incomplete_at: Integer?, + incomplete_details: OpenAI::Beta::Threads::Message::IncompleteDetails?, + metadata: OpenAI::Models::metadata?, + object: :"thread.message", + role: OpenAI::Models::Beta::Threads::Message::role, + run_id: String?, + status: OpenAI::Models::Beta::Threads::Message::status, + thread_id: String + } + type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] } class Attachment < 
OpenAI::Internal::Type::BaseModel @@ -77,17 +94,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool]? def tools=: ( - ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] - ) -> ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly @@ -102,24 +124,30 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Message::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Message::Attachment::tool] end end type incomplete_details = { - reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason + reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_accessor reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason + attr_accessor reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason def initialize: ( - reason: OpenAI::Beta::Threads::Message::IncompleteDetails::reason + reason: OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason ) -> void + def to_hash: -> { + reason: 
OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason + } + type reason = :content_filter | :max_tokens @@ -136,7 +164,7 @@ module OpenAI RUN_EXPIRED: :run_expired RUN_FAILED: :run_failed - def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::IncompleteDetails::reason] end end @@ -148,7 +176,7 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::role] end type status = :in_progress | :incomplete | :completed @@ -160,7 +188,7 @@ module OpenAI INCOMPLETE: :incomplete COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::Beta::Threads::Message::status] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Message::status] end end end diff --git a/sig/openai/models/beta/threads/message_create_params.rbs b/sig/openai/models/beta/threads/message_create_params.rbs index 48d32702..c1229739 100644 --- a/sig/openai/models/beta/threads/message_create_params.rbs +++ b/sig/openai/models/beta/threads/message_create_params.rbs @@ -31,6 +31,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::Threads::MessageCreateParams::content, + role: OpenAI::Models::Beta::Threads::MessageCreateParams::role, + attachments: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment]?, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -57,7 +65,7 @@ module OpenAI type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -65,17 
+73,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool]? def tools=: ( - ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] - ) -> ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch @@ -89,9 +102,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::MessageCreateParams::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::tool] end end end diff --git a/sig/openai/models/beta/threads/message_delete_params.rbs b/sig/openai/models/beta/threads/message_delete_params.rbs index 9edbd8b5..50109473 100644 --- a/sig/openai/models/beta/threads/message_delete_params.rbs +++ b/sig/openai/models/beta/threads/message_delete_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/message_deleted.rbs 
b/sig/openai/models/beta/threads/message_deleted.rbs index d3b0da8c..5e95bfc4 100644 --- a/sig/openai/models/beta/threads/message_deleted.rbs +++ b/sig/openai/models/beta/threads/message_deleted.rbs @@ -17,6 +17,12 @@ module OpenAI deleted: bool, ?object: :"thread.message.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"thread.message.deleted" + } end end end diff --git a/sig/openai/models/beta/threads/message_delta.rbs b/sig/openai/models/beta/threads/message_delta.rbs index d953aeb6..a7f93140 100644 --- a/sig/openai/models/beta/threads/message_delta.rbs +++ b/sig/openai/models/beta/threads/message_delta.rbs @@ -5,7 +5,7 @@ module OpenAI type message_delta = { content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], - role: OpenAI::Beta::Threads::MessageDelta::role + role: OpenAI::Models::Beta::Threads::MessageDelta::role } class MessageDelta < OpenAI::Internal::Type::BaseModel @@ -15,17 +15,22 @@ module OpenAI ::Array[OpenAI::Models::Beta::Threads::message_content_delta] ) -> ::Array[OpenAI::Models::Beta::Threads::message_content_delta] - attr_reader role: OpenAI::Beta::Threads::MessageDelta::role? + attr_reader role: OpenAI::Models::Beta::Threads::MessageDelta::role? 
def role=: ( - OpenAI::Beta::Threads::MessageDelta::role - ) -> OpenAI::Beta::Threads::MessageDelta::role + OpenAI::Models::Beta::Threads::MessageDelta::role + ) -> OpenAI::Models::Beta::Threads::MessageDelta::role def initialize: ( ?content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], - ?role: OpenAI::Beta::Threads::MessageDelta::role + ?role: OpenAI::Models::Beta::Threads::MessageDelta::role ) -> void + def to_hash: -> { + content: ::Array[OpenAI::Models::Beta::Threads::message_content_delta], + role: OpenAI::Models::Beta::Threads::MessageDelta::role + } + type role = :user | :assistant module Role @@ -34,7 +39,7 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::Threads::MessageDelta::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::MessageDelta::role] end end end diff --git a/sig/openai/models/beta/threads/message_delta_event.rbs b/sig/openai/models/beta/threads/message_delta_event.rbs index d84d3446..8068a35a 100644 --- a/sig/openai/models/beta/threads/message_delta_event.rbs +++ b/sig/openai/models/beta/threads/message_delta_event.rbs @@ -21,6 +21,12 @@ module OpenAI delta: OpenAI::Beta::Threads::MessageDelta, ?object: :"thread.message.delta" ) -> void + + def to_hash: -> { + id: String, + delta: OpenAI::Beta::Threads::MessageDelta, + object: :"thread.message.delta" + } end end end diff --git a/sig/openai/models/beta/threads/message_list_params.rbs b/sig/openai/models/beta/threads/message_list_params.rbs index eede9b56..32254799 100644 --- a/sig/openai/models/beta/threads/message_list_params.rbs +++ b/sig/openai/models/beta/threads/message_list_params.rbs @@ -47,6 +47,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + limit: Integer, + order: OpenAI::Models::Beta::Threads::MessageListParams::order, + run_id: String, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order 
diff --git a/sig/openai/models/beta/threads/message_retrieve_params.rbs b/sig/openai/models/beta/threads/message_retrieve_params.rbs index 847e3c4c..b0dc3313 100644 --- a/sig/openai/models/beta/threads/message_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/message_retrieve_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/message_update_params.rbs b/sig/openai/models/beta/threads/message_update_params.rbs index 38806f27..a4a5cb18 100644 --- a/sig/openai/models/beta/threads/message_update_params.rbs +++ b/sig/openai/models/beta/threads/message_update_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/refusal_content_block.rbs b/sig/openai/models/beta/threads/refusal_content_block.rbs index 98c6625d..815e53d2 100644 --- a/sig/openai/models/beta/threads/refusal_content_block.rbs +++ b/sig/openai/models/beta/threads/refusal_content_block.rbs @@ -10,6 +10,8 @@ module OpenAI attr_accessor type: :refusal def initialize: (refusal: String, ?type: :refusal) -> void + + def to_hash: -> { refusal: String, type: :refusal } end end end diff --git a/sig/openai/models/beta/threads/refusal_delta_block.rbs b/sig/openai/models/beta/threads/refusal_delta_block.rbs index c6f86524..e38a21fa 100644 --- a/sig/openai/models/beta/threads/refusal_delta_block.rbs +++ b/sig/openai/models/beta/threads/refusal_delta_block.rbs @@ -19,6 +19,8 @@ module OpenAI ?refusal: String, ?type: :refusal ) -> void + + def to_hash: -> { index: Integer, type: :refusal, refusal: String } end end end diff --git 
a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs index 5fcd15c2..8f6d8be6 100644 --- a/sig/openai/models/beta/threads/required_action_function_tool_call.rbs +++ b/sig/openai/models/beta/threads/required_action_function_tool_call.rbs @@ -22,6 +22,12 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + id: String, + function: OpenAI::Beta::Threads::RequiredActionFunctionToolCall::Function, + type: :function + } + type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -30,6 +36,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/beta/threads/run.rbs b/sig/openai/models/beta/threads/run.rbs index 50a8ae87..92f30e03 100644 --- a/sig/openai/models/beta/threads/run.rbs +++ b/sig/openai/models/beta/threads/run.rbs @@ -27,7 +27,7 @@ module OpenAI thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Beta::TruncationObject?, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, usage: OpenAI::Beta::Threads::Run::Usage?, temperature: Float?, top_p: Float? @@ -80,7 +80,7 @@ module OpenAI attr_accessor tools: ::Array[OpenAI::Models::Beta::assistant_tool] - attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? + attr_accessor truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy? attr_accessor usage: OpenAI::Beta::Threads::Run::Usage? 
@@ -111,27 +111,63 @@ module OpenAI thread_id: String, tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool], - truncation_strategy: OpenAI::Beta::TruncationObject?, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, usage: OpenAI::Beta::Threads::Run::Usage?, ?temperature: Float?, ?top_p: Float?, ?object: :"thread.run" ) -> void + def to_hash: -> { + id: String, + assistant_id: String, + cancelled_at: Integer?, + completed_at: Integer?, + created_at: Integer, + expires_at: Integer?, + failed_at: Integer?, + incomplete_details: OpenAI::Beta::Threads::Run::IncompleteDetails?, + instructions: String, + last_error: OpenAI::Beta::Threads::Run::LastError?, + max_completion_tokens: Integer?, + max_prompt_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + model: String, + object: :"thread.run", + parallel_tool_calls: bool, + required_action: OpenAI::Beta::Threads::Run::RequiredAction?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + started_at: Integer?, + status: OpenAI::Models::Beta::Threads::run_status, + thread_id: String, + tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool], + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy?, + usage: OpenAI::Beta::Threads::Run::Usage?, + temperature: Float?, + top_p: Float? + } + type incomplete_details = - { reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason } + { + reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_reader reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason? + attr_reader reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason? 
def reason=: ( - OpenAI::Beta::Threads::Run::IncompleteDetails::reason - ) -> OpenAI::Beta::Threads::Run::IncompleteDetails::reason + OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + ) -> OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason def initialize: ( - ?reason: OpenAI::Beta::Threads::Run::IncompleteDetails::reason + ?reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason ) -> void + def to_hash: -> { + reason: OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason + } + type reason = :max_completion_tokens | :max_prompt_tokens module Reason @@ -140,26 +176,31 @@ module OpenAI MAX_COMPLETION_TOKENS: :max_completion_tokens MAX_PROMPT_TOKENS: :max_prompt_tokens - def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::IncompleteDetails::reason] end end type last_error = { - code: OpenAI::Beta::Threads::Run::LastError::code, + code: OpenAI::Models::Beta::Threads::Run::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Beta::Threads::Run::LastError::code + attr_accessor code: OpenAI::Models::Beta::Threads::Run::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::Beta::Threads::Run::LastError::code, + code: OpenAI::Models::Beta::Threads::Run::LastError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::Beta::Threads::Run::LastError::code, + message: String + } + type code = :server_error | :rate_limit_exceeded | :invalid_prompt module Code @@ -169,7 +210,7 @@ module OpenAI RATE_LIMIT_EXCEEDED: :rate_limit_exceeded INVALID_PROMPT: :invalid_prompt - def self?.values: -> ::Array[OpenAI::Beta::Threads::Run::LastError::code] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::LastError::code] end end @@ -189,6 +230,11 @@ module OpenAI ?type: :submit_tool_outputs ) -> void + def to_hash: -> { + 
submit_tool_outputs: OpenAI::Beta::Threads::Run::RequiredAction::SubmitToolOutputs, + type: :submit_tool_outputs + } + type submit_tool_outputs = { tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] @@ -200,6 +246,43 @@ module OpenAI def initialize: ( tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] ) -> void + + def to_hash: -> { + tool_calls: ::Array[OpenAI::Beta::Threads::RequiredActionFunctionToolCall] + } + end + end + + type truncation_strategy = + { + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, + last_messages: Integer? + } + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_ + + attr_accessor last_messages: Integer? + + def initialize: ( + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, + ?last_messages: Integer? + ) -> void + + def to_hash: -> { + type: OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_, + last_messages: Integer? 
+ } + + type type_ = :auto | :last_messages + + module Type + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LAST_MESSAGES: :last_messages + + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Run::TruncationStrategy::type_] end end @@ -222,6 +305,12 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/beta/threads/run_cancel_params.rbs b/sig/openai/models/beta/threads/run_cancel_params.rbs index 03525bb2..d96641a6 100644 --- a/sig/openai/models/beta/threads/run_cancel_params.rbs +++ b/sig/openai/models/beta/threads/run_cancel_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/run_create_params.rbs b/sig/openai/models/beta/threads/run_create_params.rbs index c4f2cac4..a9c8e394 100644 --- a/sig/openai/models/beta/threads/run_create_params.rbs +++ b/sig/openai/models/beta/threads/run_create_params.rbs @@ -20,7 +20,7 @@ module OpenAI tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, top_p: Float?, - truncation_strategy: OpenAI::Beta::TruncationObject? + truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? } & OpenAI::Internal::Type::request_parameters @@ -66,7 +66,7 @@ module OpenAI attr_accessor top_p: Float? - attr_accessor truncation_strategy: OpenAI::Beta::TruncationObject? + attr_accessor truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy? 
def initialize: ( assistant_id: String, @@ -85,34 +85,62 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + assistant_id: String, + include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + additional_instructions: String?, + additional_messages: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage]?, + instructions: String?, + max_completion_tokens: Integer?, + max_prompt_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::Beta::Threads::RunCreateParams::model?, + parallel_tool_calls: bool, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Beta::assistant_response_format_option?, + temperature: Float?, + tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, + tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, + top_p: Float?, + truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, + request_options: OpenAI::RequestOptions + } + type additional_message = { - content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content, - role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role, + content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, metadata: OpenAI::Models::metadata? 
} class AdditionalMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content + attr_accessor content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content - attr_accessor role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role + attr_accessor role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role attr_accessor attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]? attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( - content: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content, - role: OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role, + content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, ?attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, ?metadata: OpenAI::Models::metadata? ) -> void + def to_hash: -> { + content: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content, + role: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role, + attachments: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment]?, + metadata: OpenAI::Models::metadata? 
+ } + type content = String | ::Array[OpenAI::Models::Beta::Threads::message_content_part_param] @@ -120,7 +148,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::content] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::content] MessageContentPartParamArray: OpenAI::Internal::Type::Converter end @@ -133,13 +161,13 @@ module OpenAI USER: :user ASSISTANT: :assistant - def self?.values: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::role] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::role] end type attachment = { file_id: String, - tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] } class Attachment < OpenAI::Internal::Type::BaseModel @@ -147,17 +175,22 @@ module OpenAI def file_id=: (String) -> String - attr_reader tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]? + attr_reader tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool]? 
def tools=: ( - ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] - ) -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ) -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] def initialize: ( ?file_id: String, - ?tools: ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + ?tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] ) -> void + def to_hash: -> { + file_id: String, + tools: ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + } + type tool = OpenAI::Beta::CodeInterpreterTool | OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch @@ -171,9 +204,11 @@ module OpenAI attr_accessor type: :file_search def initialize: (?type: :file_search) -> void + + def to_hash: -> { type: :file_search } end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::tool] end end end @@ -185,6 +220,39 @@ module OpenAI def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::model] end + + type truncation_strategy = + { + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + last_messages: Integer? + } + + class TruncationStrategy < OpenAI::Internal::Type::BaseModel + attr_accessor type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_ + + attr_accessor last_messages: Integer? + + def initialize: ( + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + ?last_messages: Integer? 
+ ) -> void + + def to_hash: -> { + type: OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_, + last_messages: Integer? + } + + type type_ = :auto | :last_messages + + module Type + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LAST_MESSAGES: :last_messages + + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::type_] + end + end end end end diff --git a/sig/openai/models/beta/threads/run_list_params.rbs b/sig/openai/models/beta/threads/run_list_params.rbs index de898e20..fa76718f 100644 --- a/sig/openai/models/beta/threads/run_list_params.rbs +++ b/sig/openai/models/beta/threads/run_list_params.rbs @@ -41,6 +41,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + limit: Integer, + order: OpenAI::Models::Beta::Threads::RunListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/run_retrieve_params.rbs b/sig/openai/models/beta/threads/run_retrieve_params.rbs index c9efe99b..d100bece 100644 --- a/sig/openai/models/beta/threads/run_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/run_retrieve_params.rbs @@ -15,6 +15,11 @@ module OpenAI thread_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs index 4e87b641..8d499043 100644 --- a/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs +++ b/sig/openai/models/beta/threads/run_submit_tool_outputs_params.rbs @@ -23,6 +23,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + thread_id: String, + tool_outputs: 
::Array[OpenAI::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput], + request_options: OpenAI::RequestOptions + } + type tool_output = { output: String, tool_call_id: String } class ToolOutput < OpenAI::Internal::Type::BaseModel @@ -35,6 +41,8 @@ module OpenAI def tool_call_id=: (String) -> String def initialize: (?output: String, ?tool_call_id: String) -> void + + def to_hash: -> { output: String, tool_call_id: String } end end end diff --git a/sig/openai/models/beta/threads/run_update_params.rbs b/sig/openai/models/beta/threads/run_update_params.rbs index 28ff20c9..664db83c 100644 --- a/sig/openai/models/beta/threads/run_update_params.rbs +++ b/sig/openai/models/beta/threads/run_update_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs index 35175f5b..31b1e593 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_logs.rbs @@ -20,6 +20,8 @@ module OpenAI ?logs: String, ?type: :logs ) -> void + + def to_hash: -> { index: Integer, type: :logs, logs: String } end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs index a90448ca..012b6c3f 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_output_image.rbs @@ -27,6 +27,12 @@ module OpenAI ?type: :image ) -> void + def to_hash: -> { + index: Integer, + type: :image, + image: OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage::Image + } + type image = { file_id: String } class Image 
< OpenAI::Internal::Type::BaseModel @@ -35,6 +41,8 @@ module OpenAI def file_id=: (String) -> String def initialize: (?file_id: String) -> void + + def to_hash: -> { file_id: String } end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs index a5051e0b..68ed586a 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call.rbs @@ -23,22 +23,33 @@ module OpenAI ?type: :code_interpreter ) -> void + def to_hash: -> { + id: String, + code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter, + type: :code_interpreter + } + type code_interpreter = { input: String, - outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel attr_accessor input: String - attr_accessor outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + attr_accessor outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] def initialize: ( input: String, - outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] ) -> void + def to_hash: -> { + input: String, + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + } + type output = OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs | OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image @@ -54,6 +65,8 @@ module OpenAI attr_accessor type: :logs def initialize: (logs: String, ?type: :logs) 
-> void + + def to_hash: -> { logs: String, type: :logs } end type image = @@ -72,16 +85,23 @@ module OpenAI ?type: :image ) -> void + def to_hash: -> { + image: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image, + type: :image + } + type image = { file_id: String } class Image < OpenAI::Internal::Type::BaseModel attr_accessor file_id: String def initialize: (file_id: String) -> void + + def to_hash: -> { file_id: String } end end - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::output] end end end diff --git a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs index e15f351d..650b7203 100644 --- a/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rbs @@ -33,10 +33,17 @@ module OpenAI ?type: :code_interpreter ) -> void + def to_hash: -> { + index: Integer, + type: :code_interpreter, + id: String, + code_interpreter: OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter + } + type code_interpreter = { input: String, - outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] } class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -44,17 +51,22 @@ module OpenAI def input=: (String) -> String - attr_reader outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]? + attr_reader outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output]? 
def outputs=: ( - ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] - ) -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ) -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] def initialize: ( ?input: String, - ?outputs: ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + ?outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] ) -> void + def to_hash: -> { + input: String, + outputs: ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + } + type output = OpenAI::Beta::Threads::Runs::CodeInterpreterLogs | OpenAI::Beta::Threads::Runs::CodeInterpreterOutputImage @@ -62,7 +74,7 @@ module OpenAI module Output extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::output] end end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs index 5c49e956..ca0f5a8e 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call.rbs @@ -23,6 +23,12 @@ module OpenAI ?type: :file_search ) -> void + def to_hash: -> { + id: String, + file_search: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch, + type: :file_search + } + type file_search = { ranking_options: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, @@ -47,22 +53,32 @@ module OpenAI ?results: 
::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] ) -> void + def to_hash: -> { + ranking_options: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, + results: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] + } + type ranking_options = { - ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_accessor ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker + attr_accessor ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker attr_accessor score_threshold: Float def initialize: ( - ranker: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, score_threshold: Float ) -> void + def to_hash: -> { + ranker: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker, + score_threshold: Float + } + type ranker = :auto | :default_2024_08_21 module Ranker @@ -71,7 +87,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_08_21: :default_2024_08_21 - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::ranker] end end @@ -103,10 +119,17 @@ module OpenAI ?content: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] ) -> void + def to_hash: -> { + file_id: String, + file_name: String, + score: Float, + content: ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] + } + type content = { text: 
String, - type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ } class Content < OpenAI::Internal::Type::BaseModel @@ -114,17 +137,22 @@ module OpenAI def text=: (String) -> String - attr_reader type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_? + attr_reader type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_? def type=: ( - OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ - ) -> OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + ) -> OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ def initialize: ( ?text: String, - ?type: OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + ?type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ ) -> void + def to_hash: -> { + text: String, + type: OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_ + } + type type_ = :text module Type @@ -132,7 +160,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::type_] end end end diff --git a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs index 15b045e3..ebac80d2 100644 --- a/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/file_search_tool_call_delta.rbs @@ -23,6 +23,13 @@ module OpenAI ?id: 
String, ?type: :file_search ) -> void + + def to_hash: -> { + file_search: top, + index: Integer, + type: :file_search, + id: String + } end end end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call.rbs b/sig/openai/models/beta/threads/runs/function_tool_call.rbs index cb76a0f6..aa424fe9 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call.rbs @@ -23,6 +23,12 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + id: String, + function: OpenAI::Beta::Threads::Runs::FunctionToolCall::Function, + type: :function + } + type function = { arguments: String, name: String, output: String? } class Function < OpenAI::Internal::Type::BaseModel @@ -37,6 +43,12 @@ module OpenAI name: String, output: String? ) -> void + + def to_hash: -> { + arguments: String, + name: String, + output: String? + } end end end diff --git a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs index 44cf047c..2955d8c3 100644 --- a/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs +++ b/sig/openai/models/beta/threads/runs/function_tool_call_delta.rbs @@ -33,6 +33,13 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + index: Integer, + type: :function, + id: String, + function: OpenAI::Beta::Threads::Runs::FunctionToolCallDelta::Function + } + type function = { arguments: String, name: String, output: String? } class Function < OpenAI::Internal::Type::BaseModel @@ -51,6 +58,12 @@ module OpenAI ?name: String, ?output: String? ) -> void + + def to_hash: -> { + arguments: String, + name: String, + output: String? 
+ } end end end diff --git a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs index 0ec0c19f..6cfec2be 100644 --- a/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/message_creation_step_details.rbs @@ -19,12 +19,19 @@ module OpenAI ?type: :message_creation ) -> void + def to_hash: -> { + message_creation: OpenAI::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation, + type: :message_creation + } + type message_creation = { message_id: String } class MessageCreation < OpenAI::Internal::Type::BaseModel attr_accessor message_id: String def initialize: (message_id: String) -> void + + def to_hash: -> { message_id: String } end end end diff --git a/sig/openai/models/beta/threads/runs/run_step.rbs b/sig/openai/models/beta/threads/runs/run_step.rbs index 164ba811..f58bd943 100644 --- a/sig/openai/models/beta/threads/runs/run_step.rbs +++ b/sig/openai/models/beta/threads/runs/run_step.rbs @@ -18,10 +18,10 @@ module OpenAI metadata: OpenAI::Models::metadata?, object: :"thread.run.step", run_id: String, - status: OpenAI::Beta::Threads::Runs::RunStep::status, - step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, thread_id: String, - type: OpenAI::Beta::Threads::Runs::RunStep::type_, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? 
} @@ -48,13 +48,13 @@ module OpenAI attr_accessor run_id: String - attr_accessor status: OpenAI::Beta::Threads::Runs::RunStep::status + attr_accessor status: OpenAI::Models::Beta::Threads::Runs::RunStep::status - attr_accessor step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details + attr_accessor step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details attr_accessor thread_id: String - attr_accessor type: OpenAI::Beta::Threads::Runs::RunStep::type_ + attr_accessor type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_ attr_accessor usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? @@ -69,30 +69,54 @@ module OpenAI last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError?, metadata: OpenAI::Models::metadata?, run_id: String, - status: OpenAI::Beta::Threads::Runs::RunStep::status, - step_details: OpenAI::Beta::Threads::Runs::RunStep::step_details, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, thread_id: String, - type: OpenAI::Beta::Threads::Runs::RunStep::type_, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, usage: OpenAI::Beta::Threads::Runs::RunStep::Usage?, ?object: :"thread.run.step" ) -> void + def to_hash: -> { + id: String, + assistant_id: String, + cancelled_at: Integer?, + completed_at: Integer?, + created_at: Integer, + expired_at: Integer?, + failed_at: Integer?, + last_error: OpenAI::Beta::Threads::Runs::RunStep::LastError?, + metadata: OpenAI::Models::metadata?, + object: :"thread.run.step", + run_id: String, + status: OpenAI::Models::Beta::Threads::Runs::RunStep::status, + step_details: OpenAI::Models::Beta::Threads::Runs::RunStep::step_details, + thread_id: String, + type: OpenAI::Models::Beta::Threads::Runs::RunStep::type_, + usage: OpenAI::Beta::Threads::Runs::RunStep::Usage? 
+ } + type last_error = { - code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code, + code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code + attr_accessor code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::Beta::Threads::Runs::RunStep::LastError::code, + code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code, + message: String + } + type code = :server_error | :rate_limit_exceeded module Code @@ -101,7 +125,7 @@ module OpenAI SERVER_ERROR: :server_error RATE_LIMIT_EXCEEDED: :rate_limit_exceeded - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::LastError::code] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::code] end end @@ -117,7 +141,7 @@ module OpenAI COMPLETED: :completed EXPIRED: :expired - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::status] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::status] end type step_details = @@ -127,7 +151,7 @@ module OpenAI module StepDetails extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::step_details] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::step_details] end type type_ = :message_creation | :tool_calls @@ -138,7 +162,7 @@ module OpenAI MESSAGE_CREATION: :message_creation TOOL_CALLS: :tool_calls - def self?.values: -> ::Array[OpenAI::Beta::Threads::Runs::RunStep::type_] + def self?.values: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStep::type_] end type usage = @@ -160,6 +184,12 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> 
void + + def to_hash: -> { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta.rbs index cd977802..010d9373 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta.rbs @@ -7,20 +7,24 @@ module OpenAI module Runs type run_step_delta = { - step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details + step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details } class RunStepDelta < OpenAI::Internal::Type::BaseModel - attr_reader step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details? + attr_reader step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details? def step_details=: ( - OpenAI::Beta::Threads::Runs::RunStepDelta::step_details - ) -> OpenAI::Beta::Threads::Runs::RunStepDelta::step_details + OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + ) -> OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details def initialize: ( - ?step_details: OpenAI::Beta::Threads::Runs::RunStepDelta::step_details + ?step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details ) -> void + def to_hash: -> { + step_details: OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details + } + type step_details = OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta | OpenAI::Beta::Threads::Runs::ToolCallDeltaObject @@ -28,7 +32,7 @@ module OpenAI module StepDetails extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Beta::Threads::Runs::RunStepDelta::step_details] + def self?.variants: -> ::Array[OpenAI::Models::Beta::Threads::Runs::RunStepDelta::step_details] end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs index a0e9f5e8..70ca4c44 
100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_event.rbs @@ -24,6 +24,12 @@ module OpenAI delta: OpenAI::Beta::Threads::Runs::RunStepDelta, ?object: :"thread.run.step.delta" ) -> void + + def to_hash: -> { + id: String, + delta: OpenAI::Beta::Threads::Runs::RunStepDelta, + object: :"thread.run.step.delta" + } end end end diff --git a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs index dae61d3d..0bb8a0bb 100644 --- a/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs +++ b/sig/openai/models/beta/threads/runs/run_step_delta_message_delta.rbs @@ -25,6 +25,11 @@ module OpenAI ?type: :message_creation ) -> void + def to_hash: -> { + type: :message_creation, + message_creation: OpenAI::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation + } + type message_creation = { message_id: String } class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -33,6 +38,8 @@ module OpenAI def message_id=: (String) -> String def initialize: (?message_id: String) -> void + + def to_hash: -> { message_id: String } end end end diff --git a/sig/openai/models/beta/threads/runs/step_list_params.rbs b/sig/openai/models/beta/threads/runs/step_list_params.rbs index 786d087f..2eb9aae2 100644 --- a/sig/openai/models/beta/threads/runs/step_list_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_list_params.rbs @@ -54,6 +54,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + thread_id: String, + after: String, + before: String, + include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + limit: Integer, + order: OpenAI::Models::Beta::Threads::Runs::StepListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs 
b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs index 414a4b61..94750c9f 100644 --- a/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs +++ b/sig/openai/models/beta/threads/runs/step_retrieve_params.rbs @@ -31,6 +31,13 @@ module OpenAI ?include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + thread_id: String, + run_id: String, + include: ::Array[OpenAI::Models::Beta::Threads::Runs::run_step_include], + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs index 2b6aefef..758dd0c0 100644 --- a/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs +++ b/sig/openai/models/beta/threads/runs/tool_call_delta_object.rbs @@ -22,6 +22,11 @@ module OpenAI ?tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta], ?type: :tool_calls ) -> void + + def to_hash: -> { + type: :tool_calls, + tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call_delta] + } end end end diff --git a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs index b5aaf17e..91baa731 100644 --- a/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs +++ b/sig/openai/models/beta/threads/runs/tool_calls_step_details.rbs @@ -18,6 +18,11 @@ module OpenAI tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call], ?type: :tool_calls ) -> void + + def to_hash: -> { + tool_calls: ::Array[OpenAI::Models::Beta::Threads::Runs::tool_call], + type: :tool_calls + } end end end diff --git a/sig/openai/models/beta/threads/text.rbs b/sig/openai/models/beta/threads/text.rbs index a6585bbd..faff33c1 100644 --- a/sig/openai/models/beta/threads/text.rbs +++ b/sig/openai/models/beta/threads/text.rbs @@ -17,6 +17,11 @@ module OpenAI 
annotations: ::Array[OpenAI::Models::Beta::Threads::annotation], value: String ) -> void + + def to_hash: -> { + annotations: ::Array[OpenAI::Models::Beta::Threads::annotation], + value: String + } end end end diff --git a/sig/openai/models/beta/threads/text_content_block.rbs b/sig/openai/models/beta/threads/text_content_block.rbs index 5192465f..16343075 100644 --- a/sig/openai/models/beta/threads/text_content_block.rbs +++ b/sig/openai/models/beta/threads/text_content_block.rbs @@ -14,6 +14,8 @@ module OpenAI text: OpenAI::Beta::Threads::Text, ?type: :text ) -> void + + def to_hash: -> { text: OpenAI::Beta::Threads::Text, type: :text } end end end diff --git a/sig/openai/models/beta/threads/text_content_block_param.rbs b/sig/openai/models/beta/threads/text_content_block_param.rbs index 7b4be77b..2805c546 100644 --- a/sig/openai/models/beta/threads/text_content_block_param.rbs +++ b/sig/openai/models/beta/threads/text_content_block_param.rbs @@ -10,6 +10,8 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void + + def to_hash: -> { text: String, type: :text } end end end diff --git a/sig/openai/models/beta/threads/text_delta.rbs b/sig/openai/models/beta/threads/text_delta.rbs index 9bad2d71..fe100222 100644 --- a/sig/openai/models/beta/threads/text_delta.rbs +++ b/sig/openai/models/beta/threads/text_delta.rbs @@ -23,6 +23,11 @@ module OpenAI ?annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta], ?value: String ) -> void + + def to_hash: -> { + annotations: ::Array[OpenAI::Models::Beta::Threads::annotation_delta], + value: String + } end end end diff --git a/sig/openai/models/beta/threads/text_delta_block.rbs b/sig/openai/models/beta/threads/text_delta_block.rbs index 261d3fc6..5db737c3 100644 --- a/sig/openai/models/beta/threads/text_delta_block.rbs +++ b/sig/openai/models/beta/threads/text_delta_block.rbs @@ -25,6 +25,12 @@ module OpenAI ?text: OpenAI::Beta::Threads::TextDelta, ?type: :text ) -> void + + 
def to_hash: -> { + index: Integer, + type: :text, + text: OpenAI::Beta::Threads::TextDelta + } end end end diff --git a/sig/openai/models/beta/truncation_object.rbs b/sig/openai/models/beta/truncation_object.rbs deleted file mode 100644 index 4e4f1ae3..00000000 --- a/sig/openai/models/beta/truncation_object.rbs +++ /dev/null @@ -1,30 +0,0 @@ -module OpenAI - module Models - module Beta - type truncation_object = - { type: OpenAI::Beta::TruncationObject::type_, last_messages: Integer? } - - class TruncationObject < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Beta::TruncationObject::type_ - - attr_accessor last_messages: Integer? - - def initialize: ( - type: OpenAI::Beta::TruncationObject::type_, - ?last_messages: Integer? - ) -> void - - type type_ = :auto | :last_messages - - module Type - extend OpenAI::Internal::Type::Enum - - AUTO: :auto - LAST_MESSAGES: :last_messages - - def self?.values: -> ::Array[OpenAI::Beta::TruncationObject::type_] - end - end - end - end -end diff --git a/sig/openai/models/chat/chat_completion.rbs b/sig/openai/models/chat/chat_completion.rbs index b717a8ba..e66a399d 100644 --- a/sig/openai/models/chat/chat_completion.rbs +++ b/sig/openai/models/chat/chat_completion.rbs @@ -10,7 +10,7 @@ module OpenAI created: Integer, model: String, object: :"chat.completion", - service_tier: OpenAI::Chat::ChatCompletion::service_tier?, + service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, system_fingerprint: String, usage: OpenAI::CompletionUsage } @@ -26,7 +26,7 @@ module OpenAI attr_accessor object: :"chat.completion" - attr_accessor service_tier: OpenAI::Chat::ChatCompletion::service_tier? + attr_accessor service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier? attr_reader system_fingerprint: String? 
@@ -41,22 +41,33 @@ module OpenAI choices: ::Array[OpenAI::Chat::ChatCompletion::Choice], created: Integer, model: String, - ?service_tier: OpenAI::Chat::ChatCompletion::service_tier?, + ?service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, ?system_fingerprint: String, ?usage: OpenAI::CompletionUsage, ?object: :"chat.completion" ) -> void + def to_hash: -> { + id: String, + choices: ::Array[OpenAI::Chat::ChatCompletion::Choice], + created: Integer, + model: String, + object: :"chat.completion", + service_tier: OpenAI::Models::Chat::ChatCompletion::service_tier?, + system_fingerprint: String, + usage: OpenAI::CompletionUsage + } + type choice = { - finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, index: Integer, logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, message: OpenAI::Chat::ChatCompletionMessage } class Choice < OpenAI::Internal::Type::BaseModel - attr_accessor finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason + attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason attr_accessor index: Integer @@ -65,12 +76,19 @@ module OpenAI attr_accessor message: OpenAI::Chat::ChatCompletionMessage def initialize: ( - finish_reason: OpenAI::Chat::ChatCompletion::Choice::finish_reason, + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, index: Integer, logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, message: OpenAI::Chat::ChatCompletionMessage ) -> void + def to_hash: -> { + finish_reason: OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason, + index: Integer, + logprobs: OpenAI::Chat::ChatCompletion::Choice::Logprobs?, + message: OpenAI::Chat::ChatCompletionMessage + } + type finish_reason = :stop | :length | :tool_calls | :content_filter | :function_call @@ -83,7 +101,7 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def 
self?.values: -> ::Array[OpenAI::Chat::ChatCompletion::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::Choice::finish_reason] end type logprobs = @@ -101,6 +119,11 @@ module OpenAI content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? + } end end @@ -113,7 +136,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletion::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletion::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs index bf03ff8c..afc4b011 100644 --- a/sig/openai/models/chat/chat_completion_assistant_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_assistant_message_param.rbs @@ -7,7 +7,7 @@ module OpenAI { role: :assistant, audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, - content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content?, + content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, name: String, refusal: String?, @@ -19,7 +19,7 @@ module OpenAI attr_accessor audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio? - attr_accessor content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content? + attr_accessor content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content? attr_accessor function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall? 
@@ -37,7 +37,7 @@ module OpenAI def initialize: ( ?audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, - ?content: OpenAI::Chat::ChatCompletionAssistantMessageParam::content?, + ?content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, ?function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, ?name: String, ?refusal: String?, @@ -45,17 +45,29 @@ module OpenAI ?role: :assistant ) -> void + def to_hash: -> { + role: :assistant, + audio: OpenAI::Chat::ChatCompletionAssistantMessageParam::Audio?, + content: OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content?, + function_call: OpenAI::Chat::ChatCompletionAssistantMessageParam::FunctionCall?, + name: String, + refusal: String?, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + } + type audio = { id: String } class Audio < OpenAI::Internal::Type::BaseModel attr_accessor id: String def initialize: (id: String) -> void + + def to_hash: -> { id: String } end type content = String - | ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] + | ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] module Content extend OpenAI::Internal::Type::Union @@ -67,10 +79,10 @@ module OpenAI module ArrayOfContentPart extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::array_of_content_part] end - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAssistantMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::content] ArrayOfContentPartArray: OpenAI::Internal::Type::Converter end @@ -83,6 +95,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + 
def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_audio.rbs b/sig/openai/models/chat/chat_completion_audio.rbs index 596be96d..e1cbcccc 100644 --- a/sig/openai/models/chat/chat_completion_audio.rbs +++ b/sig/openai/models/chat/chat_completion_audio.rbs @@ -21,6 +21,13 @@ module OpenAI expires_at: Integer, transcript: String ) -> void + + def to_hash: -> { + id: String, + data: String, + expires_at: Integer, + transcript: String + } end end end diff --git a/sig/openai/models/chat/chat_completion_audio_param.rbs b/sig/openai/models/chat/chat_completion_audio_param.rbs index b326faf7..d4689dfe 100644 --- a/sig/openai/models/chat/chat_completion_audio_param.rbs +++ b/sig/openai/models/chat/chat_completion_audio_param.rbs @@ -5,20 +5,25 @@ module OpenAI module Chat type chat_completion_audio_param = { - format_: OpenAI::Chat::ChatCompletionAudioParam::format_, - voice: OpenAI::Chat::ChatCompletionAudioParam::voice + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice } class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel - attr_accessor format_: OpenAI::Chat::ChatCompletionAudioParam::format_ + attr_accessor format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_ - attr_accessor voice: OpenAI::Chat::ChatCompletionAudioParam::voice + attr_accessor voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice def initialize: ( - format_: OpenAI::Chat::ChatCompletionAudioParam::format_, - voice: OpenAI::Chat::ChatCompletionAudioParam::voice + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice ) -> void + def to_hash: -> { + format_: OpenAI::Models::Chat::ChatCompletionAudioParam::format_, + voice: OpenAI::Models::Chat::ChatCompletionAudioParam::voice + } + type format_ = :wav | :aac | :mp3 | :flac | :opus | :pcm16 module Format @@ 
-31,7 +36,7 @@ module OpenAI OPUS: :opus PCM16: :pcm16 - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionAudioParam::format_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::format_] end type voice = @@ -51,7 +56,7 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionAudioParam::voice] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionAudioParam::voice] ALLOY: :alloy ASH: :ash diff --git a/sig/openai/models/chat/chat_completion_chunk.rbs b/sig/openai/models/chat/chat_completion_chunk.rbs index 2a451ed9..0e7acf36 100644 --- a/sig/openai/models/chat/chat_completion_chunk.rbs +++ b/sig/openai/models/chat/chat_completion_chunk.rbs @@ -10,7 +10,7 @@ module OpenAI created: Integer, model: String, object: :"chat.completion.chunk", - service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier?, + service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, system_fingerprint: String, usage: OpenAI::CompletionUsage? } @@ -26,7 +26,7 @@ module OpenAI attr_accessor object: :"chat.completion.chunk" - attr_accessor service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier? + attr_accessor service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier? attr_reader system_fingerprint: String? 
@@ -39,16 +39,27 @@ module OpenAI choices: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice], created: Integer, model: String, - ?service_tier: OpenAI::Chat::ChatCompletionChunk::service_tier?, + ?service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, ?system_fingerprint: String, ?usage: OpenAI::CompletionUsage?, ?object: :"chat.completion.chunk" ) -> void + def to_hash: -> { + id: String, + choices: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice], + created: Integer, + model: String, + object: :"chat.completion.chunk", + service_tier: OpenAI::Models::Chat::ChatCompletionChunk::service_tier?, + system_fingerprint: String, + usage: OpenAI::CompletionUsage? + } + type choice = { delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason?, + finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, index: Integer, logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? } @@ -56,7 +67,7 @@ module OpenAI class Choice < OpenAI::Internal::Type::BaseModel attr_accessor delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta - attr_accessor finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason? + attr_accessor finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason? attr_accessor index: Integer @@ -64,17 +75,24 @@ module OpenAI def initialize: ( delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, - finish_reason: OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason?, + finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, index: Integer, ?logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? ) -> void + def to_hash: -> { + delta: OpenAI::Chat::ChatCompletionChunk::Choice::Delta, + finish_reason: OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason?, + index: Integer, + logprobs: OpenAI::Chat::ChatCompletionChunk::Choice::Logprobs? 
+ } + type delta = { content: String?, function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, refusal: String?, - role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } @@ -89,11 +107,11 @@ module OpenAI attr_accessor refusal: String? - attr_reader role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role? + attr_reader role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role? def role=: ( - OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role - ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role + OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role + ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role attr_reader tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall]? @@ -105,10 +123,18 @@ module OpenAI ?content: String?, ?function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, ?refusal: String?, - ?role: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role, + ?role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, ?tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] ) -> void + def to_hash: -> { + content: String?, + function_call: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, + refusal: String?, + role: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] + } + type function_call = { arguments: String, name: String } class FunctionCall < OpenAI::Internal::Type::BaseModel @@ -121,6 +147,8 @@ module OpenAI def name=: (String) -> String def initialize: (?arguments: String, ?name: String) -> void + + def to_hash: -> { arguments: String, name: String } end type role = :developer | :system | :user | :assistant | :tool @@ -134,7 +162,7 
@@ module OpenAI ASSISTANT: :assistant TOOL: :tool - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::role] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::role] end type tool_call = @@ -142,7 +170,7 @@ module OpenAI index: Integer, id: String, function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ } class ToolCall < OpenAI::Internal::Type::BaseModel @@ -158,19 +186,26 @@ module OpenAI OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function - attr_reader type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_? + attr_reader type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_? def type=: ( - OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ - ) -> OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + ) -> OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ def initialize: ( index: Integer, ?id: String, ?function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, - ?type: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + ?type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ ) -> void + def to_hash: -> { + index: Integer, + id: String, + function: OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, + type: OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_ + } + type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -183,6 +218,8 @@ module OpenAI def name=: (String) -> String def initialize: (?arguments: String, 
?name: String) -> void + + def to_hash: -> { arguments: String, name: String } end type type_ = :function @@ -192,7 +229,7 @@ module OpenAI FUNCTION: :function - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::type_] end end end @@ -209,7 +246,7 @@ module OpenAI CONTENT_FILTER: :content_filter FUNCTION_CALL: :function_call - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::Choice::finish_reason] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::Choice::finish_reason] end type logprobs = @@ -227,6 +264,11 @@ module OpenAI content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]?, + refusal: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob]? + } end end @@ -239,7 +281,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionChunk::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionChunk::service_tier] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part.rbs b/sig/openai/models/chat/chat_completion_content_part.rbs index 77010098..c0750762 100644 --- a/sig/openai/models/chat/chat_completion_content_part.rbs +++ b/sig/openai/models/chat/chat_completion_content_part.rbs @@ -28,6 +28,11 @@ module OpenAI ?type: :file ) -> void + def to_hash: -> { + file: OpenAI::Chat::ChatCompletionContentPart::File::File, + type: :file + } + type file = { file_data: String, file_id: String, filename: String } class File < OpenAI::Internal::Type::BaseModel @@ -48,6 +53,12 @@ module OpenAI ?file_id: String, ?filename: String ) -> void + + def to_hash: -> { + file_data: String, + file_id: String, + filename: String + } end end diff --git 
a/sig/openai/models/chat/chat_completion_content_part_image.rbs b/sig/openai/models/chat/chat_completion_content_part_image.rbs index f7088321..c8ae374c 100644 --- a/sig/openai/models/chat/chat_completion_content_part_image.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_image.rbs @@ -19,26 +19,36 @@ module OpenAI ?type: :image_url ) -> void + def to_hash: -> { + image_url: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL, + type: :image_url + } + type image_url = { url: String, - detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail + detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String - attr_reader detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail? + attr_reader detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail? def detail=: ( - OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail - ) -> OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail + OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail + ) -> OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail def initialize: ( url: String, - ?detail: OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail + ?detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail ) -> void + def to_hash: -> { + url: String, + detail: OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail + } + type detail = :auto | :low | :high module Detail @@ -48,7 +58,7 @@ module OpenAI LOW: :low HIGH: :high - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionContentPartImage::ImageURL::detail] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::detail] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs 
b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs index 85d1abea..e2818299 100644 --- a/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_input_audio.rbs @@ -19,22 +19,32 @@ module OpenAI ?type: :input_audio ) -> void + def to_hash: -> { + input_audio: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio, + type: :input_audio + } + type input_audio = { data: String, - format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ } class InputAudio < OpenAI::Internal::Type::BaseModel attr_accessor data: String - attr_accessor format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + attr_accessor format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ def initialize: ( data: String, - format_: OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ ) -> void + def to_hash: -> { + data: String, + format_: OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_ + } + type format_ = :wav | :mp3 module Format @@ -43,7 +53,7 @@ module OpenAI WAV: :wav MP3: :mp3 - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::format_] end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs index 9845f993..3511c222 100644 --- a/sig/openai/models/chat/chat_completion_content_part_refusal.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_refusal.rbs @@ -12,6 +12,8 @@ module OpenAI attr_accessor type: :refusal def initialize: (refusal: 
String, ?type: :refusal) -> void + + def to_hash: -> { refusal: String, type: :refusal } end end end diff --git a/sig/openai/models/chat/chat_completion_content_part_text.rbs b/sig/openai/models/chat/chat_completion_content_part_text.rbs index 799d8e14..0581e14c 100644 --- a/sig/openai/models/chat/chat_completion_content_part_text.rbs +++ b/sig/openai/models/chat/chat_completion_content_part_text.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void + + def to_hash: -> { text: String, type: :text } end end end diff --git a/sig/openai/models/chat/chat_completion_deleted.rbs b/sig/openai/models/chat/chat_completion_deleted.rbs index eb9ae980..c37e21de 100644 --- a/sig/openai/models/chat/chat_completion_deleted.rbs +++ b/sig/openai/models/chat/chat_completion_deleted.rbs @@ -18,6 +18,12 @@ module OpenAI deleted: bool, ?object: :"chat.completion.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"chat.completion.deleted" + } end end end diff --git a/sig/openai/models/chat/chat_completion_developer_message_param.rbs b/sig/openai/models/chat/chat_completion_developer_message_param.rbs index ecf27a5d..3333b2ce 100644 --- a/sig/openai/models/chat/chat_completion_developer_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_developer_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_developer_message_param = { - content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, role: :developer, name: String } class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content attr_accessor role: :developer @@ -20,18 +20,24 @@ module OpenAI def name=: (String) -> String def 
initialize: ( - content: OpenAI::Chat::ChatCompletionDeveloperMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, ?name: String, ?role: :developer ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content, + role: :developer, + name: String + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionDeveloperMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_function_call_option.rbs b/sig/openai/models/chat/chat_completion_function_call_option.rbs index b18d80c5..5ce652cc 100644 --- a/sig/openai/models/chat/chat_completion_function_call_option.rbs +++ b/sig/openai/models/chat/chat_completion_function_call_option.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor name: String def initialize: (name: String) -> void + + def to_hash: -> { name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_function_message_param.rbs b/sig/openai/models/chat/chat_completion_function_message_param.rbs index 17944160..4bb967bc 100644 --- a/sig/openai/models/chat/chat_completion_function_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_function_message_param.rbs @@ -18,6 +18,8 @@ module OpenAI name: String, ?role: :function ) -> void + + def to_hash: -> { content: String?, name: String, role: :function } end end end diff --git a/sig/openai/models/chat/chat_completion_message.rbs b/sig/openai/models/chat/chat_completion_message.rbs index bea71029..7225d0a0 100644 --- a/sig/openai/models/chat/chat_completion_message.rbs +++ b/sig/openai/models/chat/chat_completion_message.rbs @@ -51,6 +51,16 @@ module OpenAI ?role: :assistant ) 
-> void + def to_hash: -> { + content: String?, + refusal: String?, + role: :assistant, + annotations: ::Array[OpenAI::Chat::ChatCompletionMessage::Annotation], + audio: OpenAI::Chat::ChatCompletionAudio?, + function_call: OpenAI::Chat::ChatCompletionMessage::FunctionCall, + tool_calls: ::Array[OpenAI::Chat::ChatCompletionMessageToolCall] + } + type annotation = { type: :url_citation, @@ -67,6 +77,11 @@ module OpenAI ?type: :url_citation ) -> void + def to_hash: -> { + type: :url_citation, + url_citation: OpenAI::Chat::ChatCompletionMessage::Annotation::URLCitation + } + type url_citation = { end_index: Integer, @@ -90,6 +105,13 @@ module OpenAI title: String, url: String ) -> void + + def to_hash: -> { + end_index: Integer, + start_index: Integer, + title: String, + url: String + } end end @@ -101,6 +123,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_message_tool_call.rbs b/sig/openai/models/chat/chat_completion_message_tool_call.rbs index d15d52bf..c787ea9c 100644 --- a/sig/openai/models/chat/chat_completion_message_tool_call.rbs +++ b/sig/openai/models/chat/chat_completion_message_tool_call.rbs @@ -23,6 +23,12 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + id: String, + function: OpenAI::Chat::ChatCompletionMessageToolCall::Function, + type: :function + } + type function = { arguments: String, name: String } class Function < OpenAI::Internal::Type::BaseModel @@ -31,6 +37,8 @@ module OpenAI attr_accessor name: String def initialize: (arguments: String, name: String) -> void + + def to_hash: -> { arguments: String, name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs index 38e0aeb5..50395d8f 100644 --- 
a/sig/openai/models/chat/chat_completion_named_tool_choice.rbs +++ b/sig/openai/models/chat/chat_completion_named_tool_choice.rbs @@ -19,12 +19,19 @@ module OpenAI ?type: :function ) -> void + def to_hash: -> { + function: OpenAI::Chat::ChatCompletionNamedToolChoice::Function, + type: :function + } + type function = { name: String } class Function < OpenAI::Internal::Type::BaseModel attr_accessor name: String def initialize: (name: String) -> void + + def to_hash: -> { name: String } end end end diff --git a/sig/openai/models/chat/chat_completion_prediction_content.rbs b/sig/openai/models/chat/chat_completion_prediction_content.rbs index 65a4a7a6..c52fc481 100644 --- a/sig/openai/models/chat/chat_completion_prediction_content.rbs +++ b/sig/openai/models/chat/chat_completion_prediction_content.rbs @@ -5,27 +5,32 @@ module OpenAI module Chat type chat_completion_prediction_content = { - content: OpenAI::Chat::ChatCompletionPredictionContent::content, + content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, type: :content } class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionPredictionContent::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content attr_accessor type: :content def initialize: ( - content: OpenAI::Chat::ChatCompletionPredictionContent::content, + content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, ?type: :content ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionPredictionContent::content, + type: :content + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionPredictionContent::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionPredictionContent::content] ChatCompletionContentPartTextArray: 
OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_store_message.rbs b/sig/openai/models/chat/chat_completion_store_message.rbs index 13440ab9..7b236cff 100644 --- a/sig/openai/models/chat/chat_completion_store_message.rbs +++ b/sig/openai/models/chat/chat_completion_store_message.rbs @@ -11,6 +11,8 @@ module OpenAI def id=: (String _) -> String def initialize: (id: String) -> void + + def to_hash: -> { id: String } end end end diff --git a/sig/openai/models/chat/chat_completion_stream_options.rbs b/sig/openai/models/chat/chat_completion_stream_options.rbs index 4b0267d1..6905d394 100644 --- a/sig/openai/models/chat/chat_completion_stream_options.rbs +++ b/sig/openai/models/chat/chat_completion_stream_options.rbs @@ -11,6 +11,8 @@ module OpenAI def include_usage=: (bool) -> bool def initialize: (?include_usage: bool) -> void + + def to_hash: -> { include_usage: bool } end end end diff --git a/sig/openai/models/chat/chat_completion_system_message_param.rbs b/sig/openai/models/chat/chat_completion_system_message_param.rbs index 067f582c..e1cd80e9 100644 --- a/sig/openai/models/chat/chat_completion_system_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_system_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_system_message_param = { - content: OpenAI::Chat::ChatCompletionSystemMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, role: :system, name: String } class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionSystemMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content attr_accessor role: :system @@ -20,18 +20,24 @@ module OpenAI def name=: (String) -> String def initialize: ( - content: OpenAI::Chat::ChatCompletionSystemMessageParam::content, + content: 
OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, ?name: String, ?role: :system ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content, + role: :system, + name: String + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionSystemMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionSystemMessageParam::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_token_logprob.rbs b/sig/openai/models/chat/chat_completion_token_logprob.rbs index f2bd18a1..3ad85411 100644 --- a/sig/openai/models/chat/chat_completion_token_logprob.rbs +++ b/sig/openai/models/chat/chat_completion_token_logprob.rbs @@ -27,6 +27,13 @@ module OpenAI top_logprobs: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] ) -> void + def to_hash: -> { + token: String, + bytes: ::Array[Integer]?, + logprob: Float, + top_logprobs: ::Array[OpenAI::Chat::ChatCompletionTokenLogprob::TopLogprob] + } + type top_logprob = { token: String, bytes: ::Array[Integer]?, logprob: Float } @@ -42,6 +49,12 @@ module OpenAI bytes: ::Array[Integer]?, logprob: Float ) -> void + + def to_hash: -> { + token: String, + bytes: ::Array[Integer]?, + logprob: Float + } end end end diff --git a/sig/openai/models/chat/chat_completion_tool.rbs b/sig/openai/models/chat/chat_completion_tool.rbs index e5d54e1b..23153c68 100644 --- a/sig/openai/models/chat/chat_completion_tool.rbs +++ b/sig/openai/models/chat/chat_completion_tool.rbs @@ -15,6 +15,11 @@ module OpenAI function: OpenAI::FunctionDefinition, ?type: :function ) -> void + + def to_hash: -> { + function: OpenAI::FunctionDefinition, + type: :function + } end end end diff --git a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs 
b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs index a5c2df81..523db9a4 100644 --- a/sig/openai/models/chat/chat_completion_tool_choice_option.rbs +++ b/sig/openai/models/chat/chat_completion_tool_choice_option.rbs @@ -4,7 +4,7 @@ module OpenAI module Chat type chat_completion_tool_choice_option = - OpenAI::Chat::ChatCompletionToolChoiceOption::auto + OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto | OpenAI::Chat::ChatCompletionNamedToolChoice module ChatCompletionToolChoiceOption @@ -19,7 +19,7 @@ module OpenAI AUTO: :auto REQUIRED: :required - def self?.values: -> ::Array[OpenAI::Chat::ChatCompletionToolChoiceOption::auto] + def self?.values: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolChoiceOption::auto] end def self?.variants: -> ::Array[OpenAI::Models::Chat::chat_completion_tool_choice_option] diff --git a/sig/openai/models/chat/chat_completion_tool_message_param.rbs b/sig/openai/models/chat/chat_completion_tool_message_param.rbs index 21997531..10c157ab 100644 --- a/sig/openai/models/chat/chat_completion_tool_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_tool_message_param.rbs @@ -5,31 +5,37 @@ module OpenAI module Chat type chat_completion_tool_message_param = { - content: OpenAI::Chat::ChatCompletionToolMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content, role: :tool, tool_call_id: String } class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionToolMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content attr_accessor role: :tool attr_accessor tool_call_id: String def initialize: ( - content: OpenAI::Chat::ChatCompletionToolMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionToolMessageParam::content, tool_call_id: String, ?role: :tool ) -> void + def to_hash: -> { + content: 
OpenAI::Models::Chat::ChatCompletionToolMessageParam::content, + role: :tool, + tool_call_id: String + } + type content = String | ::Array[OpenAI::Chat::ChatCompletionContentPartText] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionToolMessageParam::content] + def self?.variants: -> ::Array[OpenAI::Models::Chat::ChatCompletionToolMessageParam::content] ChatCompletionContentPartTextArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/chat_completion_user_message_param.rbs b/sig/openai/models/chat/chat_completion_user_message_param.rbs index 9bc5d59c..a021b82a 100644 --- a/sig/openai/models/chat/chat_completion_user_message_param.rbs +++ b/sig/openai/models/chat/chat_completion_user_message_param.rbs @@ -5,13 +5,13 @@ module OpenAI module Chat type chat_completion_user_message_param = { - content: OpenAI::Chat::ChatCompletionUserMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content, role: :user, name: String } class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Chat::ChatCompletionUserMessageParam::content + attr_accessor content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content attr_accessor role: :user @@ -20,18 +20,24 @@ module OpenAI def name=: (String) -> String def initialize: ( - content: OpenAI::Chat::ChatCompletionUserMessageParam::content, + content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content, ?name: String, ?role: :user ) -> void + def to_hash: -> { + content: OpenAI::Models::Chat::ChatCompletionUserMessageParam::content, + role: :user, + name: String + } + type content = String | ::Array[OpenAI::Models::Chat::chat_completion_content_part] module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Chat::ChatCompletionUserMessageParam::content] + def self?.variants: -> 
::Array[OpenAI::Models::Chat::ChatCompletionUserMessageParam::content] ChatCompletionContentPartArray: OpenAI::Internal::Type::Converter end diff --git a/sig/openai/models/chat/completion_create_params.rbs b/sig/openai/models/chat/completion_create_params.rbs index 05054037..0f3ad11e 100644 --- a/sig/openai/models/chat/completion_create_params.rbs +++ b/sig/openai/models/chat/completion_create_params.rbs @@ -14,7 +14,7 @@ module OpenAI max_completion_tokens: Integer?, max_tokens: Integer?, metadata: OpenAI::Models::metadata?, - modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, n: Integer?, parallel_tool_calls: bool, prediction: OpenAI::Chat::ChatCompletionPredictionContent?, @@ -70,7 +70,7 @@ module OpenAI attr_accessor metadata: OpenAI::Models::metadata? - attr_accessor modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]? + attr_accessor modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]? attr_accessor n: Integer? 
@@ -140,7 +140,7 @@ module OpenAI ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, @@ -162,6 +162,40 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + messages: ::Array[OpenAI::Models::Chat::chat_completion_message_param], + model: OpenAI::Models::Chat::CompletionCreateParams::model, + audio: OpenAI::Chat::ChatCompletionAudioParam?, + frequency_penalty: Float?, + function_call: OpenAI::Models::Chat::CompletionCreateParams::function_call, + functions: ::Array[OpenAI::Chat::CompletionCreateParams::Function], + logit_bias: ::Hash[Symbol, Integer]?, + logprobs: bool?, + max_completion_tokens: Integer?, + max_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, + n: Integer?, + parallel_tool_calls: bool, + prediction: OpenAI::Chat::ChatCompletionPredictionContent?, + presence_penalty: Float?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + response_format: OpenAI::Models::Chat::CompletionCreateParams::response_format, + seed: Integer?, + service_tier: OpenAI::Models::Chat::CompletionCreateParams::service_tier?, + stop: OpenAI::Models::Chat::CompletionCreateParams::stop?, + store: bool?, + stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, + temperature: Float?, + tool_choice: OpenAI::Models::Chat::chat_completion_tool_choice_option, + tools: ::Array[OpenAI::Chat::ChatCompletionTool], + top_logprobs: Integer?, + top_p: Float?, + user: String, + web_search_options: OpenAI::Chat::CompletionCreateParams::WebSearchOptions, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::chat_model module Model @@ -171,7 
+205,7 @@ module OpenAI end type function_call = - OpenAI::Chat::CompletionCreateParams::FunctionCall::function_call_mode + OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode | OpenAI::Chat::ChatCompletionFunctionCallOption module FunctionCall @@ -185,7 +219,7 @@ module OpenAI NONE: :none AUTO: :auto - def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::FunctionCall::function_call_mode] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::function_call_mode] end def self?.variants: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::function_call] @@ -216,6 +250,12 @@ module OpenAI ?description: String, ?parameters: OpenAI::Models::function_parameters ) -> void + + def to_hash: -> { + name: String, + description: String, + parameters: OpenAI::Models::function_parameters + } end type modality = :text | :audio @@ -226,7 +266,7 @@ module OpenAI TEXT: :text AUDIO: :audio - def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::modality] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality] end type response_format = @@ -264,24 +304,29 @@ module OpenAI type web_search_options = { - search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? } class WebSearchOptions < OpenAI::Internal::Type::BaseModel - attr_reader search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size? + attr_reader search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size? 
def search_context_size=: ( - OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size - ) -> OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size + OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size + ) -> OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size attr_accessor user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? def initialize: ( - ?search_context_size: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, + ?search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, ?user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? ) -> void + def to_hash: -> { + search_context_size: OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size, + user_location: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation? 
+ } + type search_context_size = :low | :medium | :high module SearchContextSize @@ -291,7 +336,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self?.values: -> ::Array[OpenAI::Chat::CompletionCreateParams::WebSearchOptions::search_context_size] + def self?.values: -> ::Array[OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::search_context_size] end type user_location = @@ -310,6 +355,11 @@ module OpenAI ?type: :approximate ) -> void + def to_hash: -> { + approximate: OpenAI::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate, + type: :approximate + } + type approximate = { city: String, @@ -341,6 +391,13 @@ module OpenAI ?region: String, ?timezone: String ) -> void + + def to_hash: -> { + city: String, + country: String, + region: String, + timezone: String + } end end end diff --git a/sig/openai/models/chat/completion_delete_params.rbs b/sig/openai/models/chat/completion_delete_params.rbs index e20bc3a4..d3ddc656 100644 --- a/sig/openai/models/chat/completion_delete_params.rbs +++ b/sig/openai/models/chat/completion_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/chat/completion_list_params.rbs b/sig/openai/models/chat/completion_list_params.rbs index 83b22904..692b13c1 100644 --- a/sig/openai/models/chat/completion_list_params.rbs +++ b/sig/openai/models/chat/completion_list_params.rbs @@ -44,6 +44,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + metadata: OpenAI::Models::metadata?, + model: String, + order: OpenAI::Models::Chat::CompletionListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/chat/completion_retrieve_params.rbs 
b/sig/openai/models/chat/completion_retrieve_params.rbs index 8d4b832e..e91d58a6 100644 --- a/sig/openai/models/chat/completion_retrieve_params.rbs +++ b/sig/openai/models/chat/completion_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/chat/completion_update_params.rbs b/sig/openai/models/chat/completion_update_params.rbs index 942741c4..6a169ae6 100644 --- a/sig/openai/models/chat/completion_update_params.rbs +++ b/sig/openai/models/chat/completion_update_params.rbs @@ -15,6 +15,11 @@ module OpenAI metadata: OpenAI::Models::metadata?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + metadata: OpenAI::Models::metadata?, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/chat/completions/message_list_params.rbs b/sig/openai/models/chat/completions/message_list_params.rbs index 0257770c..0e65a06b 100644 --- a/sig/openai/models/chat/completions/message_list_params.rbs +++ b/sig/openai/models/chat/completions/message_list_params.rbs @@ -35,6 +35,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::Chat::Completions::MessageListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/chat_model.rbs b/sig/openai/models/chat_model.rbs index 8f80d09b..52519f63 100644 --- a/sig/openai/models/chat_model.rbs +++ b/sig/openai/models/chat_model.rbs @@ -33,6 +33,7 @@ module OpenAI | :"gpt-4o-search-preview-2025-03-11" | :"gpt-4o-mini-search-preview-2025-03-11" | :"chatgpt-4o-latest" + | :"codex-mini-latest" | :"gpt-4o-mini" | :"gpt-4o-mini-2024-07-18" | :"gpt-4-turbo" @@ -90,6 +91,7 @@ module OpenAI 
GPT_4O_SEARCH_PREVIEW_2025_03_11: :"gpt-4o-search-preview-2025-03-11" GPT_4O_MINI_SEARCH_PREVIEW_2025_03_11: :"gpt-4o-mini-search-preview-2025-03-11" CHATGPT_4O_LATEST: :"chatgpt-4o-latest" + CODEX_MINI_LATEST: :"codex-mini-latest" GPT_4O_MINI: :"gpt-4o-mini" GPT_4O_MINI_2024_07_18: :"gpt-4o-mini-2024-07-18" GPT_4_TURBO: :"gpt-4-turbo" diff --git a/sig/openai/models/comparison_filter.rbs b/sig/openai/models/comparison_filter.rbs index a2288002..f5f686ba 100644 --- a/sig/openai/models/comparison_filter.rbs +++ b/sig/openai/models/comparison_filter.rbs @@ -3,23 +3,29 @@ module OpenAI type comparison_filter = { key: String, - type: OpenAI::ComparisonFilter::type_, - value: OpenAI::ComparisonFilter::value + type: OpenAI::Models::ComparisonFilter::type_, + value: OpenAI::Models::ComparisonFilter::value } class ComparisonFilter < OpenAI::Internal::Type::BaseModel attr_accessor key: String - attr_accessor type: OpenAI::ComparisonFilter::type_ + attr_accessor type: OpenAI::Models::ComparisonFilter::type_ - attr_accessor value: OpenAI::ComparisonFilter::value + attr_accessor value: OpenAI::Models::ComparisonFilter::value def initialize: ( key: String, - type: OpenAI::ComparisonFilter::type_, - value: OpenAI::ComparisonFilter::value + type: OpenAI::Models::ComparisonFilter::type_, + value: OpenAI::Models::ComparisonFilter::value ) -> void + def to_hash: -> { + key: String, + type: OpenAI::Models::ComparisonFilter::type_, + value: OpenAI::Models::ComparisonFilter::value + } + type type_ = :eq | :ne | :gt | :gte | :lt | :lte module Type @@ -32,7 +38,7 @@ module OpenAI LT: :lt LTE: :lte - def self?.values: -> ::Array[OpenAI::ComparisonFilter::type_] + def self?.values: -> ::Array[OpenAI::Models::ComparisonFilter::type_] end type value = String | Float | bool @@ -40,7 +46,7 @@ module OpenAI module Value extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::ComparisonFilter::value] + def self?.variants: -> ::Array[OpenAI::Models::ComparisonFilter::value] 
end end end diff --git a/sig/openai/models/completion.rbs b/sig/openai/models/completion.rbs index a9d8a71b..40b716e4 100644 --- a/sig/openai/models/completion.rbs +++ b/sig/openai/models/completion.rbs @@ -39,6 +39,16 @@ module OpenAI ?usage: OpenAI::CompletionUsage, ?object: :text_completion ) -> void + + def to_hash: -> { + id: String, + choices: ::Array[OpenAI::CompletionChoice], + created: Integer, + model: String, + object: :text_completion, + system_fingerprint: String, + usage: OpenAI::CompletionUsage + } end end end diff --git a/sig/openai/models/completion_choice.rbs b/sig/openai/models/completion_choice.rbs index a42cad21..53a0ea09 100644 --- a/sig/openai/models/completion_choice.rbs +++ b/sig/openai/models/completion_choice.rbs @@ -2,14 +2,14 @@ module OpenAI module Models type completion_choice = { - finish_reason: OpenAI::CompletionChoice::finish_reason, + finish_reason: OpenAI::Models::CompletionChoice::finish_reason, index: Integer, logprobs: OpenAI::CompletionChoice::Logprobs?, text: String } class CompletionChoice < OpenAI::Internal::Type::BaseModel - attr_accessor finish_reason: OpenAI::CompletionChoice::finish_reason + attr_accessor finish_reason: OpenAI::Models::CompletionChoice::finish_reason attr_accessor index: Integer @@ -18,12 +18,19 @@ module OpenAI attr_accessor text: String def initialize: ( - finish_reason: OpenAI::CompletionChoice::finish_reason, + finish_reason: OpenAI::Models::CompletionChoice::finish_reason, index: Integer, logprobs: OpenAI::CompletionChoice::Logprobs?, text: String ) -> void + def to_hash: -> { + finish_reason: OpenAI::Models::CompletionChoice::finish_reason, + index: Integer, + logprobs: OpenAI::CompletionChoice::Logprobs?, + text: String + } + type finish_reason = :stop | :length | :content_filter module FinishReason @@ -33,7 +40,7 @@ module OpenAI LENGTH: :length CONTENT_FILTER: :content_filter - def self?.values: -> ::Array[OpenAI::CompletionChoice::finish_reason] + def self?.values: -> 
::Array[OpenAI::Models::CompletionChoice::finish_reason] end type logprobs = @@ -69,6 +76,13 @@ module OpenAI ?tokens: ::Array[String], ?top_logprobs: ::Array[::Hash[Symbol, Float]] ) -> void + + def to_hash: -> { + text_offset: ::Array[Integer], + token_logprobs: ::Array[Float], + tokens: ::Array[String], + top_logprobs: ::Array[::Hash[Symbol, Float]] + } end end end diff --git a/sig/openai/models/completion_create_params.rbs b/sig/openai/models/completion_create_params.rbs index f2a4f357..5dcdfb9d 100644 --- a/sig/openai/models/completion_create_params.rbs +++ b/sig/openai/models/completion_create_params.rbs @@ -83,6 +83,27 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + model: OpenAI::Models::CompletionCreateParams::model, + prompt: OpenAI::Models::CompletionCreateParams::prompt?, + best_of: Integer?, + echo: bool?, + frequency_penalty: Float?, + logit_bias: ::Hash[Symbol, Integer]?, + logprobs: Integer?, + max_tokens: Integer?, + n: Integer?, + presence_penalty: Float?, + seed: Integer?, + stop: OpenAI::Models::CompletionCreateParams::stop?, + stream_options: OpenAI::Chat::ChatCompletionStreamOptions?, + suffix: String?, + temperature: Float?, + top_p: Float?, + user: String, + request_options: OpenAI::RequestOptions + } + type model = String | :"gpt-3.5-turbo-instruct" | :"davinci-002" | :"babbage-002" diff --git a/sig/openai/models/completion_usage.rbs b/sig/openai/models/completion_usage.rbs index f50f1dad..d70bb65e 100644 --- a/sig/openai/models/completion_usage.rbs +++ b/sig/openai/models/completion_usage.rbs @@ -36,6 +36,14 @@ module OpenAI ?prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails ) -> void + def to_hash: -> { + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer, + completion_tokens_details: OpenAI::CompletionUsage::CompletionTokensDetails, + prompt_tokens_details: OpenAI::CompletionUsage::PromptTokensDetails + } + type completion_tokens_details = { 
accepted_prediction_tokens: Integer, @@ -67,6 +75,13 @@ module OpenAI ?reasoning_tokens: Integer, ?rejected_prediction_tokens: Integer ) -> void + + def to_hash: -> { + accepted_prediction_tokens: Integer, + audio_tokens: Integer, + reasoning_tokens: Integer, + rejected_prediction_tokens: Integer + } end type prompt_tokens_details = @@ -85,6 +100,8 @@ module OpenAI ?audio_tokens: Integer, ?cached_tokens: Integer ) -> void + + def to_hash: -> { audio_tokens: Integer, cached_tokens: Integer } end end end diff --git a/sig/openai/models/compound_filter.rbs b/sig/openai/models/compound_filter.rbs index f5c17954..ccf1036d 100644 --- a/sig/openai/models/compound_filter.rbs +++ b/sig/openai/models/compound_filter.rbs @@ -2,26 +2,31 @@ module OpenAI module Models type compound_filter = { - filters: ::Array[OpenAI::CompoundFilter::filter], - type: OpenAI::CompoundFilter::type_ + filters: ::Array[OpenAI::Models::CompoundFilter::filter], + type: OpenAI::Models::CompoundFilter::type_ } class CompoundFilter < OpenAI::Internal::Type::BaseModel - attr_accessor filters: ::Array[OpenAI::CompoundFilter::filter] + attr_accessor filters: ::Array[OpenAI::Models::CompoundFilter::filter] - attr_accessor type: OpenAI::CompoundFilter::type_ + attr_accessor type: OpenAI::Models::CompoundFilter::type_ def initialize: ( - filters: ::Array[OpenAI::CompoundFilter::filter], - type: OpenAI::CompoundFilter::type_ + filters: ::Array[OpenAI::Models::CompoundFilter::filter], + type: OpenAI::Models::CompoundFilter::type_ ) -> void + def to_hash: -> { + filters: ::Array[OpenAI::Models::CompoundFilter::filter], + type: OpenAI::Models::CompoundFilter::type_ + } + type filter = OpenAI::ComparisonFilter | top module Filter extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::CompoundFilter::filter] + def self?.variants: -> ::Array[OpenAI::Models::CompoundFilter::filter] end type type_ = :and | :or @@ -32,7 +37,7 @@ module OpenAI AND: :and OR: :or - def self?.values: -> 
::Array[OpenAI::CompoundFilter::type_] + def self?.values: -> ::Array[OpenAI::Models::CompoundFilter::type_] end end end diff --git a/sig/openai/models/container_create_params.rbs b/sig/openai/models/container_create_params.rbs new file mode 100644 index 00000000..3c51ec67 --- /dev/null +++ b/sig/openai/models/container_create_params.rbs @@ -0,0 +1,74 @@ +module OpenAI + module Models + type container_create_params = + { + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + file_ids: ::Array[String] + } + & OpenAI::Internal::Type::request_parameters + + class ContainerCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor name: String + + attr_reader expires_after: OpenAI::ContainerCreateParams::ExpiresAfter? + + def expires_after=: ( + OpenAI::ContainerCreateParams::ExpiresAfter + ) -> OpenAI::ContainerCreateParams::ExpiresAfter + + attr_reader file_ids: ::Array[String]? 
+ + def file_ids=: (::Array[String]) -> ::Array[String] + + def initialize: ( + name: String, + ?expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + ?file_ids: ::Array[String], + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + name: String, + expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + file_ids: ::Array[String], + request_options: OpenAI::RequestOptions + } + + type expires_after = + { + anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor + + attr_accessor minutes: Integer + + def initialize: ( + anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor, + minutes: Integer + ) -> void + + def to_hash: -> { + anchor: OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor, + minutes: Integer + } + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::Models::ContainerCreateParams::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/container_create_response.rbs b/sig/openai/models/container_create_response.rbs new file mode 100644 index 00000000..ddc8f23d --- /dev/null +++ b/sig/openai/models/container_create_response.rbs @@ -0,0 +1,87 @@ +module OpenAI + module Models + type container_create_response = + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + } + + class ContainerCreateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor name: String + + attr_accessor object: String + + attr_accessor status: String + + attr_reader expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter? 
+ + def expires_after=: ( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter + ) -> OpenAI::Models::ContainerCreateResponse::ExpiresAfter + + def initialize: ( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + ?expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter + } + + type expires_after = + { + anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_reader anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor? + + def anchor=: ( + OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor + ) -> OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor + + attr_reader minutes: Integer? + + def minutes=: (Integer) -> Integer + + def initialize: ( + ?anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor, + ?minutes: Integer + ) -> void + + def to_hash: -> { + anchor: OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor, + minutes: Integer + } + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::Models::ContainerCreateResponse::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/container_delete_params.rbs b/sig/openai/models/container_delete_params.rbs new file mode 100644 index 00000000..940b54a7 --- /dev/null +++ b/sig/openai/models/container_delete_params.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Models + type container_delete_params = + { } & OpenAI::Internal::Type::request_parameters + + class ContainerDeleteParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + 
include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } + end + end +end diff --git a/sig/openai/models/container_list_params.rbs b/sig/openai/models/container_list_params.rbs new file mode 100644 index 00000000..ec6092bb --- /dev/null +++ b/sig/openai/models/container_list_params.rbs @@ -0,0 +1,55 @@ +module OpenAI + module Models + type container_list_params = + { + after: String, + limit: Integer, + order: OpenAI::Models::ContainerListParams::order + } + & OpenAI::Internal::Type::request_parameters + + class ContainerListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::ContainerListParams::order? 
+ + def order=: ( + OpenAI::Models::ContainerListParams::order + ) -> OpenAI::Models::ContainerListParams::order + + def initialize: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::ContainerListParams::order, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::ContainerListParams::order, + request_options: OpenAI::RequestOptions + } + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::ContainerListParams::order] + end + end + end +end diff --git a/sig/openai/models/container_list_response.rbs b/sig/openai/models/container_list_response.rbs new file mode 100644 index 00000000..97f971f0 --- /dev/null +++ b/sig/openai/models/container_list_response.rbs @@ -0,0 +1,87 @@ +module OpenAI + module Models + type container_list_response = + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + } + + class ContainerListResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor name: String + + attr_accessor object: String + + attr_accessor status: String + + attr_reader expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter? 
+ + def expires_after=: ( + OpenAI::Models::ContainerListResponse::ExpiresAfter + ) -> OpenAI::Models::ContainerListResponse::ExpiresAfter + + def initialize: ( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + ?expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter + } + + type expires_after = + { + anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_reader anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor? + + def anchor=: ( + OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor + ) -> OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor + + attr_reader minutes: Integer? + + def minutes=: (Integer) -> Integer + + def initialize: ( + ?anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor, + ?minutes: Integer + ) -> void + + def to_hash: -> { + anchor: OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor, + minutes: Integer + } + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::Models::ContainerListResponse::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/container_retrieve_params.rbs b/sig/openai/models/container_retrieve_params.rbs new file mode 100644 index 00000000..74ca0b0c --- /dev/null +++ b/sig/openai/models/container_retrieve_params.rbs @@ -0,0 +1,15 @@ +module OpenAI + module Models + type container_retrieve_params = + { } & OpenAI::Internal::Type::request_parameters + + class ContainerRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include 
OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } + end + end +end diff --git a/sig/openai/models/container_retrieve_response.rbs b/sig/openai/models/container_retrieve_response.rbs new file mode 100644 index 00000000..fac17ec3 --- /dev/null +++ b/sig/openai/models/container_retrieve_response.rbs @@ -0,0 +1,87 @@ +module OpenAI + module Models + type container_retrieve_response = + { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + } + + class ContainerRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor created_at: Integer + + attr_accessor name: String + + attr_accessor object: String + + attr_accessor status: String + + attr_reader expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter? + + def expires_after=: ( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + ) -> OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + + def initialize: ( + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + ?expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter + } + + type expires_after = + { + anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor, + minutes: Integer + } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_reader anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor? + + def anchor=: ( + OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor + ) -> OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor + + attr_reader minutes: Integer? 
+ + def minutes=: (Integer) -> Integer + + def initialize: ( + ?anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor, + ?minutes: Integer + ) -> void + + def to_hash: -> { + anchor: OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor, + minutes: Integer + } + + type anchor = :last_active_at + + module Anchor + extend OpenAI::Internal::Type::Enum + + LAST_ACTIVE_AT: :last_active_at + + def self?.values: -> ::Array[OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter::anchor] + end + end + end + end +end diff --git a/sig/openai/models/containers/file_create_params.rbs b/sig/openai/models/containers/file_create_params.rbs new file mode 100644 index 00000000..8ebe9ad8 --- /dev/null +++ b/sig/openai/models/containers/file_create_params.rbs @@ -0,0 +1,36 @@ +module OpenAI + module Models + module Containers + type file_create_params = + { file: OpenAI::Internal::file_input, file_id: String } + & OpenAI::Internal::Type::request_parameters + + class FileCreateParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader file: OpenAI::Internal::file_input? + + def file=: ( + OpenAI::Internal::file_input + ) -> OpenAI::Internal::file_input + + attr_reader file_id: String? 
+ + def file_id=: (String) -> String + + def initialize: ( + ?file: OpenAI::Internal::file_input, + ?file_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + file: OpenAI::Internal::file_input, + file_id: String, + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/containers/file_create_response.rbs b/sig/openai/models/containers/file_create_response.rbs new file mode 100644 index 00000000..ccb96c98 --- /dev/null +++ b/sig/openai/models/containers/file_create_response.rbs @@ -0,0 +1,52 @@ +module OpenAI + module Models + module Containers + type file_create_response = + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + + class FileCreateResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor bytes: Integer + + attr_accessor container_id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"container.file" + + attr_accessor path: String + + attr_accessor source: String + + def initialize: ( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + ?object: :"container.file" + ) -> void + + def to_hash: -> { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + end + end + end +end diff --git a/sig/openai/models/containers/file_delete_params.rbs b/sig/openai/models/containers/file_delete_params.rbs new file mode 100644 index 00000000..b8cd479f --- /dev/null +++ b/sig/openai/models/containers/file_delete_params.rbs @@ -0,0 +1,25 @@ +module OpenAI + module Models + module Containers + type file_delete_params = + { container_id: String } & OpenAI::Internal::Type::request_parameters + + class FileDeleteParams < OpenAI::Internal::Type::BaseModel + extend 
OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor container_id: String + + def initialize: ( + container_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + container_id: String, + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/containers/file_list_params.rbs b/sig/openai/models/containers/file_list_params.rbs new file mode 100644 index 00000000..8ea4c3aa --- /dev/null +++ b/sig/openai/models/containers/file_list_params.rbs @@ -0,0 +1,57 @@ +module OpenAI + module Models + module Containers + type file_list_params = + { + after: String, + limit: Integer, + order: OpenAI::Models::Containers::FileListParams::order + } + & OpenAI::Internal::Type::request_parameters + + class FileListParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_reader after: String? + + def after=: (String) -> String + + attr_reader limit: Integer? + + def limit=: (Integer) -> Integer + + attr_reader order: OpenAI::Models::Containers::FileListParams::order? 
+ + def order=: ( + OpenAI::Models::Containers::FileListParams::order + ) -> OpenAI::Models::Containers::FileListParams::order + + def initialize: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Containers::FileListParams::order, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::Containers::FileListParams::order, + request_options: OpenAI::RequestOptions + } + + type order = :asc | :desc + + module Order + extend OpenAI::Internal::Type::Enum + + ASC: :asc + DESC: :desc + + def self?.values: -> ::Array[OpenAI::Models::Containers::FileListParams::order] + end + end + end + end +end diff --git a/sig/openai/models/containers/file_list_response.rbs b/sig/openai/models/containers/file_list_response.rbs new file mode 100644 index 00000000..1314ef0f --- /dev/null +++ b/sig/openai/models/containers/file_list_response.rbs @@ -0,0 +1,52 @@ +module OpenAI + module Models + module Containers + type file_list_response = + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + + class FileListResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor bytes: Integer + + attr_accessor container_id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"container.file" + + attr_accessor path: String + + attr_accessor source: String + + def initialize: ( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + ?object: :"container.file" + ) -> void + + def to_hash: -> { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + end + end + end +end diff --git a/sig/openai/models/containers/file_retrieve_params.rbs b/sig/openai/models/containers/file_retrieve_params.rbs new file mode 100644 index 
00000000..aba88985 --- /dev/null +++ b/sig/openai/models/containers/file_retrieve_params.rbs @@ -0,0 +1,25 @@ +module OpenAI + module Models + module Containers + type file_retrieve_params = + { container_id: String } & OpenAI::Internal::Type::request_parameters + + class FileRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor container_id: String + + def initialize: ( + container_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + container_id: String, + request_options: OpenAI::RequestOptions + } + end + end + end +end diff --git a/sig/openai/models/containers/file_retrieve_response.rbs b/sig/openai/models/containers/file_retrieve_response.rbs new file mode 100644 index 00000000..33e75b90 --- /dev/null +++ b/sig/openai/models/containers/file_retrieve_response.rbs @@ -0,0 +1,52 @@ +module OpenAI + module Models + module Containers + type file_retrieve_response = + { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + + class FileRetrieveResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor bytes: Integer + + attr_accessor container_id: String + + attr_accessor created_at: Integer + + attr_accessor object: :"container.file" + + attr_accessor path: String + + attr_accessor source: String + + def initialize: ( + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + path: String, + source: String, + ?object: :"container.file" + ) -> void + + def to_hash: -> { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: :"container.file", + path: String, + source: String + } + end + end + end +end diff --git a/sig/openai/models/containers/files/content_retrieve_params.rbs 
b/sig/openai/models/containers/files/content_retrieve_params.rbs new file mode 100644 index 00000000..8912648d --- /dev/null +++ b/sig/openai/models/containers/files/content_retrieve_params.rbs @@ -0,0 +1,27 @@ +module OpenAI + module Models + module Containers + module Files + type content_retrieve_params = + { container_id: String } & OpenAI::Internal::Type::request_parameters + + class ContentRetrieveParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + attr_accessor container_id: String + + def initialize: ( + container_id: String, + ?request_options: OpenAI::request_opts + ) -> void + + def to_hash: -> { + container_id: String, + request_options: OpenAI::RequestOptions + } + end + end + end + end +end diff --git a/sig/openai/models/create_embedding_response.rbs b/sig/openai/models/create_embedding_response.rbs index d406e7da..92433cab 100644 --- a/sig/openai/models/create_embedding_response.rbs +++ b/sig/openai/models/create_embedding_response.rbs @@ -24,6 +24,13 @@ module OpenAI ?object: :list ) -> void + def to_hash: -> { + data: ::Array[OpenAI::Embedding], + model: String, + object: :list, + usage: OpenAI::CreateEmbeddingResponse::Usage + } + type usage = { prompt_tokens: Integer, total_tokens: Integer } class Usage < OpenAI::Internal::Type::BaseModel @@ -32,6 +39,8 @@ module OpenAI attr_accessor total_tokens: Integer def initialize: (prompt_tokens: Integer, total_tokens: Integer) -> void + + def to_hash: -> { prompt_tokens: Integer, total_tokens: Integer } end end end diff --git a/sig/openai/models/embedding.rbs b/sig/openai/models/embedding.rbs index 88478e43..3c411347 100644 --- a/sig/openai/models/embedding.rbs +++ b/sig/openai/models/embedding.rbs @@ -15,6 +15,12 @@ module OpenAI index: Integer, ?object: :embedding ) -> void + + def to_hash: -> { + embedding: ::Array[Float], + index: Integer, + object: :embedding + } end end end diff --git 
a/sig/openai/models/embedding_create_params.rbs b/sig/openai/models/embedding_create_params.rbs index 73e2bc84..4600282f 100644 --- a/sig/openai/models/embedding_create_params.rbs +++ b/sig/openai/models/embedding_create_params.rbs @@ -41,6 +41,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + input: OpenAI::Models::EmbeddingCreateParams::input, + model: OpenAI::Models::EmbeddingCreateParams::model, + dimensions: Integer, + encoding_format: OpenAI::Models::EmbeddingCreateParams::encoding_format, + user: String, + request_options: OpenAI::RequestOptions + } + type input = String | ::Array[String] | ::Array[Integer] | ::Array[::Array[Integer]] diff --git a/sig/openai/models/error_object.rbs b/sig/openai/models/error_object.rbs index f3cb58bb..9ee1bb01 100644 --- a/sig/openai/models/error_object.rbs +++ b/sig/openai/models/error_object.rbs @@ -18,6 +18,13 @@ module OpenAI param: String?, type: String ) -> void + + def to_hash: -> { + code: String?, + message: String, + param: String?, + type: String + } end end end diff --git a/sig/openai/models/eval_create_params.rbs b/sig/openai/models/eval_create_params.rbs index 65ab4944..26468913 100644 --- a/sig/openai/models/eval_create_params.rbs +++ b/sig/openai/models/eval_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI type eval_create_params = { data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, - testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion], + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], metadata: OpenAI::Models::metadata?, name: String } @@ -15,7 +15,7 @@ module OpenAI attr_accessor data_source_config: OpenAI::Models::EvalCreateParams::data_source_config - attr_accessor testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion] + attr_accessor testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion] attr_accessor metadata: OpenAI::Models::metadata? 
@@ -25,12 +25,20 @@ module OpenAI def initialize: ( data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, - testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion], + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + data_source_config: OpenAI::Models::EvalCreateParams::data_source_config, + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } + type data_source_config = OpenAI::EvalCreateParams::DataSourceConfig::Custom | OpenAI::EvalCreateParams::DataSourceConfig::Logs @@ -60,6 +68,12 @@ module OpenAI ?include_sample_schema: bool, ?type: :custom ) -> void + + def to_hash: -> { + item_schema: ::Hash[Symbol, top], + type: :custom, + include_sample_schema: bool + } end type logs = { type: :logs, metadata: ::Hash[Symbol, top] } @@ -72,13 +86,15 @@ module OpenAI def metadata=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] def initialize: (?metadata: ::Hash[Symbol, top], ?type: :logs) -> void + + def to_hash: -> { type: :logs, metadata: ::Hash[Symbol, top] } end type stored_completions = - { type: :"stored-completions", metadata: ::Hash[Symbol, top] } + { type: :stored_completions, metadata: ::Hash[Symbol, top] } class StoredCompletions < OpenAI::Internal::Type::BaseModel - attr_accessor type: :"stored-completions" + attr_accessor type: :stored_completions attr_reader metadata: ::Hash[Symbol, top]? 
@@ -86,8 +102,13 @@ module OpenAI def initialize: ( ?metadata: ::Hash[Symbol, top], - ?type: :"stored-completions" + ?type: :stored_completions ) -> void + + def to_hash: -> { + type: :stored_completions, + metadata: ::Hash[Symbol, top] + } end def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::data_source_config] @@ -105,7 +126,7 @@ module OpenAI type label_model = { - input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input], + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], labels: ::Array[String], model: String, name: String, @@ -114,7 +135,7 @@ module OpenAI } class LabelModel < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input] + attr_accessor input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input] attr_accessor labels: ::Array[String] @@ -127,7 +148,7 @@ module OpenAI attr_accessor type: :label_model def initialize: ( - input: ::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input], + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], labels: ::Array[String], model: String, name: String, @@ -135,9 +156,18 @@ module OpenAI ?type: :label_model ) -> void + def to_hash: -> { + input: ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + type: :label_model + } + type input = OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage - | OpenAI::EvalItem + | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem module Input extend OpenAI::Internal::Type::Union @@ -150,9 +180,88 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end - def self?.variants: -> 
::Array[OpenAI::EvalCreateParams::TestingCriterion::LabelModel::input] + type eval_item = + { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content + + attr_accessor role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role + + attr_reader type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_? + + def type=: ( + OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + ) -> OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + ?type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content, + role: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role, + type: OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + 
attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::input] end end @@ -164,6 +273,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type python = { pass_threshold: Float } @@ -174,6 +285,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type score_model = { pass_threshold: Float } @@ -184,9 +297,11 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end - def self?.variants: -> ::Array[OpenAI::EvalCreateParams::testing_criterion] + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateParams::testing_criterion] end end end diff --git a/sig/openai/models/eval_create_response.rbs b/sig/openai/models/eval_create_response.rbs index 34408cec..0d4799af 100644 --- a/sig/openai/models/eval_create_response.rbs +++ b/sig/openai/models/eval_create_response.rbs @@ -36,20 +36,57 @@ module 
OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalCreateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalCreateResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? 
+ } + end + def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalCreateResponse::TestingCriterion::EvalGraderScoreModel @@ -65,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -75,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -85,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalCreateResponse::testing_criterion] diff --git a/sig/openai/models/eval_custom_data_source_config.rbs b/sig/openai/models/eval_custom_data_source_config.rbs index 8b53b580..6d412cef 100644 --- a/sig/openai/models/eval_custom_data_source_config.rbs +++ b/sig/openai/models/eval_custom_data_source_config.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :custom def initialize: (schema: ::Hash[Symbol, top], ?type: :custom) -> void + + def to_hash: -> { schema: ::Hash[Symbol, top], type: :custom } end end end diff --git a/sig/openai/models/eval_delete_params.rbs b/sig/openai/models/eval_delete_params.rbs index 4fb3f99f..f5a6eb71 100644 --- a/sig/openai/models/eval_delete_params.rbs +++ b/sig/openai/models/eval_delete_params.rbs @@ -7,6 +7,8 @@ module OpenAI include 
OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/eval_delete_response.rbs b/sig/openai/models/eval_delete_response.rbs index 4f0adf53..95ec2c86 100644 --- a/sig/openai/models/eval_delete_response.rbs +++ b/sig/openai/models/eval_delete_response.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor object: String def initialize: (deleted: bool, eval_id: String, object: String) -> void + + def to_hash: -> { deleted: bool, eval_id: String, object: String } end end end diff --git a/sig/openai/models/eval_item.rbs b/sig/openai/models/eval_item.rbs deleted file mode 100644 index fe989ca9..00000000 --- a/sig/openai/models/eval_item.rbs +++ /dev/null @@ -1,70 +0,0 @@ -module OpenAI - module Models - type eval_item = - { - content: OpenAI::EvalItem::content, - role: OpenAI::EvalItem::role, - type: OpenAI::EvalItem::type_ - } - - class EvalItem < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::EvalItem::content - - attr_accessor role: OpenAI::EvalItem::role - - attr_reader type: OpenAI::EvalItem::type_? 
- - def type=: (OpenAI::EvalItem::type_) -> OpenAI::EvalItem::type_ - - def initialize: ( - content: OpenAI::EvalItem::content, - role: OpenAI::EvalItem::role, - ?type: OpenAI::EvalItem::type_ - ) -> void - - type content = - String - | OpenAI::Responses::ResponseInputText - | OpenAI::EvalItem::Content::OutputText - - module Content - extend OpenAI::Internal::Type::Union - - type output_text = { text: String, type: :output_text } - - class OutputText < OpenAI::Internal::Type::BaseModel - attr_accessor text: String - - attr_accessor type: :output_text - - def initialize: (text: String, ?type: :output_text) -> void - end - - def self?.variants: -> ::Array[OpenAI::EvalItem::content] - end - - type role = :user | :assistant | :system | :developer - - module Role - extend OpenAI::Internal::Type::Enum - - USER: :user - ASSISTANT: :assistant - SYSTEM: :system - DEVELOPER: :developer - - def self?.values: -> ::Array[OpenAI::EvalItem::role] - end - - type type_ = :message - - module Type - extend OpenAI::Internal::Type::Enum - - MESSAGE: :message - - def self?.values: -> ::Array[OpenAI::EvalItem::type_] - end - end - end -end diff --git a/sig/openai/models/eval_list_params.rbs b/sig/openai/models/eval_list_params.rbs index 57ad5212..ff25dbd4 100644 --- a/sig/openai/models/eval_list_params.rbs +++ b/sig/openai/models/eval_list_params.rbs @@ -41,6 +41,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::EvalListParams::order, + order_by: OpenAI::Models::EvalListParams::order_by, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/eval_list_response.rbs b/sig/openai/models/eval_list_response.rbs index d9f4939c..ef01d7c9 100644 --- a/sig/openai/models/eval_list_response.rbs +++ b/sig/openai/models/eval_list_response.rbs @@ -36,20 +36,57 @@ module OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + 
created_at: Integer, + data_source_config: OpenAI::Models::EvalListResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalListResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalListResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? 
+ } + end + def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalListResponse::TestingCriterion::EvalGraderScoreModel @@ -65,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -75,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -85,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalListResponse::testing_criterion] diff --git a/sig/openai/models/eval_logs_data_source_config.rbs b/sig/openai/models/eval_logs_data_source_config.rbs deleted file mode 100644 index fb0e37aa..00000000 --- a/sig/openai/models/eval_logs_data_source_config.rbs +++ /dev/null @@ -1,24 +0,0 @@ -module OpenAI - module Models - type eval_logs_data_source_config = - { - schema: ::Hash[Symbol, top], - type: :logs, - metadata: OpenAI::Models::metadata? - } - - class EvalLogsDataSourceConfig < OpenAI::Internal::Type::BaseModel - attr_accessor schema: ::Hash[Symbol, top] - - attr_accessor type: :logs - - attr_accessor metadata: OpenAI::Models::metadata? 
- - def initialize: ( - schema: ::Hash[Symbol, top], - ?metadata: OpenAI::Models::metadata?, - ?type: :logs - ) -> void - end - end -end diff --git a/sig/openai/models/eval_retrieve_params.rbs b/sig/openai/models/eval_retrieve_params.rbs index c6242dcb..167a0920 100644 --- a/sig/openai/models/eval_retrieve_params.rbs +++ b/sig/openai/models/eval_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/eval_retrieve_response.rbs b/sig/openai/models/eval_retrieve_response.rbs index e04883e7..10f46696 100644 --- a/sig/openai/models/eval_retrieve_response.rbs +++ b/sig/openai/models/eval_retrieve_response.rbs @@ -36,20 +36,57 @@ module OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalRetrieveResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalRetrieveResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? 
+ } + end + def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalRetrieveResponse::TestingCriterion::EvalGraderScoreModel @@ -65,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -75,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -85,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalRetrieveResponse::testing_criterion] diff --git a/sig/openai/models/eval_stored_completions_data_source_config.rbs b/sig/openai/models/eval_stored_completions_data_source_config.rbs index f77af6cc..1237e523 100644 --- a/sig/openai/models/eval_stored_completions_data_source_config.rbs +++ b/sig/openai/models/eval_stored_completions_data_source_config.rbs @@ -3,22 +3,28 @@ module OpenAI type eval_stored_completions_data_source_config = { schema: ::Hash[Symbol, top], - type: :"stored-completions", + type: :stored_completions, metadata: OpenAI::Models::metadata? 
} class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel attr_accessor schema: ::Hash[Symbol, top] - attr_accessor type: :"stored-completions" + attr_accessor type: :stored_completions attr_accessor metadata: OpenAI::Models::metadata? def initialize: ( schema: ::Hash[Symbol, top], ?metadata: OpenAI::Models::metadata?, - ?type: :"stored-completions" + ?type: :stored_completions ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :stored_completions, + metadata: OpenAI::Models::metadata? + } end end end diff --git a/sig/openai/models/eval_update_params.rbs b/sig/openai/models/eval_update_params.rbs index cb60c3c2..fc6c2540 100644 --- a/sig/openai/models/eval_update_params.rbs +++ b/sig/openai/models/eval_update_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?name: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/eval_update_response.rbs b/sig/openai/models/eval_update_response.rbs index cf2da5e0..532a5435 100644 --- a/sig/openai/models/eval_update_response.rbs +++ b/sig/openai/models/eval_update_response.rbs @@ -36,20 +36,57 @@ module OpenAI ?object: :eval ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source_config: OpenAI::Models::EvalUpdateResponse::data_source_config, + metadata: OpenAI::Models::metadata?, + name: String, + object: :eval, + testing_criteria: ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] + } + type data_source_config = OpenAI::EvalCustomDataSourceConfig - | OpenAI::EvalLogsDataSourceConfig + | OpenAI::Models::EvalUpdateResponse::DataSourceConfig::Logs | OpenAI::EvalStoredCompletionsDataSourceConfig module DataSourceConfig extend OpenAI::Internal::Type::Union + type logs = + { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? 
+ } + + class Logs < OpenAI::Internal::Type::BaseModel + attr_accessor schema: ::Hash[Symbol, top] + + attr_accessor type: :logs + + attr_accessor metadata: OpenAI::Models::metadata? + + def initialize: ( + schema: ::Hash[Symbol, top], + ?metadata: OpenAI::Models::metadata?, + ?type: :logs + ) -> void + + def to_hash: -> { + schema: ::Hash[Symbol, top], + type: :logs, + metadata: OpenAI::Models::metadata? + } + end + def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::data_source_config] end type testing_criterion = - OpenAI::Graders::LabelModelGrader - | OpenAI::Graders::StringCheckGrader + OpenAI::Models::Graders::LabelModelGrader + | OpenAI::Models::Graders::StringCheckGrader | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderTextSimilarity | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderPython | OpenAI::Models::EvalUpdateResponse::TestingCriterion::EvalGraderScoreModel @@ -65,6 +102,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_python = { pass_threshold: Float } @@ -75,6 +114,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end type eval_grader_score_model = { pass_threshold: Float } @@ -85,6 +126,8 @@ module OpenAI def pass_threshold=: (Float _) -> Float def initialize: (?pass_threshold: Float) -> void + + def to_hash: -> { pass_threshold: Float } end def self?.variants: -> ::Array[OpenAI::Models::EvalUpdateResponse::testing_criterion] diff --git a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs index 99116842..181d516b 100644 --- a/sig/openai/models/evals/create_eval_completions_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_completions_run_data_source.rbs @@ -3,23 +3,23 @@ module 
OpenAI module Evals type create_eval_completions_run_data_source = { - source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source, - type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_, - input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages, + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, model: String, sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source + attr_accessor source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source - attr_accessor type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_ + attr_accessor type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_ - attr_reader input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages? + attr_reader input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages? def input_messages=: ( - OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages - ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages attr_reader model: String? 
@@ -32,21 +32,84 @@ module OpenAI ) -> OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams def initialize: ( - source: OpenAI::Evals::CreateEvalCompletionsRunDataSource::source, - type: OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_, - ?input_messages: OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages, + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + ?input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, ?model: String, ?sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_, + input_messages: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Evals::CreateEvalCompletionsRunDataSource::SamplingParams + } + type source = - OpenAI::Evals::EvalJSONLFileContentSource - | OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileID | OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions module Source extend OpenAI::Internal::Type::Union + type file_content = + { + content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> { + content: 
::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent::Content], + type: :file_content + } + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } + end + type stored_completions = { type: :stored_completions, @@ -78,9 +141,18 @@ module OpenAI ?model: String?, ?type: :stored_completions ) -> void + + def to_hash: -> { + type: :stored_completions, + created_after: Integer?, + created_before: Integer?, + limit: Integer?, + metadata: OpenAI::Models::metadata?, + model: String? 
+ } end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::source] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::source] end type type_ = :completions @@ -90,7 +162,7 @@ module OpenAI COMPLETIONS: :completions - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::type_] + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::type_] end type input_messages = @@ -102,27 +174,110 @@ module OpenAI type template = { - template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], type: :template } class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] + attr_accessor template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] attr_accessor type: :template def initialize: ( - template: ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], ?type: :template ) -> void + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template], + type: :template + } + type template = - OpenAI::Responses::EasyInputMessage | OpenAI::EvalItem + OpenAI::Responses::EasyInputMessage + | OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message module Template extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] + type message = + { + content: 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + } + + class Message < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content + + attr_accessor role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role + + attr_reader type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_? + + def type=: ( + OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + ) -> OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + + def initialize: ( + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + ?type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content, + role: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role, + type: OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | 
OpenAI::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::Message::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::template] end end @@ -138,9 +293,11 @@ module OpenAI item_reference: String, ?type: :item_reference ) -> void + + def to_hash: -> { item_reference: String, type: :item_reference } end - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalCompletionsRunDataSource::input_messages] + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::input_messages] end type sampling_params = @@ -174,6 +331,13 @@ module OpenAI ?temperature: Float, ?top_p: Float ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } end end end diff --git 
a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs index c7352e33..ff6c8b6a 100644 --- a/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs +++ b/sig/openai/models/evals/create_eval_jsonl_run_data_source.rbs @@ -3,28 +3,88 @@ module OpenAI module Evals type create_eval_jsonl_run_data_source = { - source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source, + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, type: :jsonl } class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source + attr_accessor source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source attr_accessor type: :jsonl def initialize: ( - source: OpenAI::Evals::CreateEvalJSONLRunDataSource::source, + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, ?type: :jsonl ) -> void + def to_hash: -> { + source: OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source, + type: :jsonl + } + type source = - OpenAI::Evals::EvalJSONLFileContentSource - | OpenAI::Evals::EvalJSONLFileIDSource + OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent + | OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileID module Source extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::source] + type file_content = + { + content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> { + content: 
::Array[OpenAI::Evals::CreateEvalJSONLRunDataSource::Source::FileContent::Content], + type: :file_content + } + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::source] end end end diff --git a/sig/openai/models/evals/create_eval_responses_run_data_source.rbs b/sig/openai/models/evals/create_eval_responses_run_data_source.rbs deleted file mode 100644 index 2f24f2f9..00000000 --- a/sig/openai/models/evals/create_eval_responses_run_data_source.rbs +++ /dev/null @@ -1,216 +0,0 @@ -module OpenAI - module Models - module Evals - type create_eval_responses_run_data_source = - { - source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_, - input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages, - model: String, - sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - } - - class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel - attr_accessor source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source - - attr_accessor type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_ - - attr_reader input_messages: 
OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages? - - def input_messages=: ( - OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages - ) -> OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages - - attr_reader model: String? - - def model=: (String) -> String - - attr_reader sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams? - - def sampling_params=: ( - OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - ) -> OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - - def initialize: ( - source: OpenAI::Evals::CreateEvalResponsesRunDataSource::source, - type: OpenAI::Evals::CreateEvalResponsesRunDataSource::type_, - ?input_messages: OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages, - ?model: String, - ?sampling_params: OpenAI::Evals::CreateEvalResponsesRunDataSource::SamplingParams - ) -> void - - type source = - OpenAI::Evals::EvalJSONLFileContentSource - | OpenAI::Evals::EvalJSONLFileIDSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource::Source::Responses - - module Source - extend OpenAI::Internal::Type::Union - - type responses = - { - type: :responses, - created_after: Integer?, - created_before: Integer?, - has_tool_calls: bool?, - instructions_search: String?, - metadata: top?, - model: String?, - reasoning_effort: OpenAI::Models::reasoning_effort?, - temperature: Float?, - tools: ::Array[String]?, - top_p: Float?, - users: ::Array[String]? - } - - class Responses < OpenAI::Internal::Type::BaseModel - attr_accessor type: :responses - - attr_accessor created_after: Integer? - - attr_accessor created_before: Integer? - - attr_accessor has_tool_calls: bool? - - attr_accessor instructions_search: String? - - attr_accessor metadata: top? - - attr_accessor model: String? - - attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? - - attr_accessor temperature: Float? - - attr_accessor tools: ::Array[String]? - - attr_accessor top_p: Float? 
- - attr_accessor users: ::Array[String]? - - def initialize: ( - ?created_after: Integer?, - ?created_before: Integer?, - ?has_tool_calls: bool?, - ?instructions_search: String?, - ?metadata: top?, - ?model: String?, - ?reasoning_effort: OpenAI::Models::reasoning_effort?, - ?temperature: Float?, - ?tools: ::Array[String]?, - ?top_p: Float?, - ?users: ::Array[String]?, - ?type: :responses - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::source] - end - - type type_ = :responses - - module Type - extend OpenAI::Internal::Type::Enum - - RESPONSES: :responses - - def self?.values: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::type_] - end - - type input_messages = - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template - | OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::ItemReference - - module InputMessages - extend OpenAI::Internal::Type::Union - - type template = - { - template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template], - type: :template - } - - class Template < OpenAI::Internal::Type::BaseModel - attr_accessor template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template] - - attr_accessor type: :template - - def initialize: ( - template: ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template], - ?type: :template - ) -> void - - type template = - OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage - | OpenAI::EvalItem - - module Template - extend OpenAI::Internal::Type::Union - - type chat_message = { content: String, role: String } - - class ChatMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: String - - attr_accessor role: String - - def initialize: (content: String, role: String) -> void - end - - def self?.variants: -> 
::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::InputMessages::Template::template] - end - end - - type item_reference = - { item_reference: String, type: :item_reference } - - class ItemReference < OpenAI::Internal::Type::BaseModel - attr_accessor item_reference: String - - attr_accessor type: :item_reference - - def initialize: ( - item_reference: String, - ?type: :item_reference - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Evals::CreateEvalResponsesRunDataSource::input_messages] - end - - type sampling_params = - { - max_completion_tokens: Integer, - seed: Integer, - temperature: Float, - top_p: Float - } - - class SamplingParams < OpenAI::Internal::Type::BaseModel - attr_reader max_completion_tokens: Integer? - - def max_completion_tokens=: (Integer) -> Integer - - attr_reader seed: Integer? - - def seed=: (Integer) -> Integer - - attr_reader temperature: Float? - - def temperature=: (Float) -> Float - - attr_reader top_p: Float? - - def top_p=: (Float) -> Float - - def initialize: ( - ?max_completion_tokens: Integer, - ?seed: Integer, - ?temperature: Float, - ?top_p: Float - ) -> void - end - end - end - end -end diff --git a/sig/openai/models/evals/eval_api_error.rbs b/sig/openai/models/evals/eval_api_error.rbs index 70bd686a..9504768f 100644 --- a/sig/openai/models/evals/eval_api_error.rbs +++ b/sig/openai/models/evals/eval_api_error.rbs @@ -11,6 +11,8 @@ module OpenAI attr_accessor message: String def initialize: (code: String, message: String) -> void + + def to_hash: -> { code: String, message: String } end end end diff --git a/sig/openai/models/evals/eval_jsonl_file_content_source.rbs b/sig/openai/models/evals/eval_jsonl_file_content_source.rbs deleted file mode 100644 index 09d077ee..00000000 --- a/sig/openai/models/evals/eval_jsonl_file_content_source.rbs +++ /dev/null @@ -1,40 +0,0 @@ -module OpenAI - module Models - class EvalJSONLFileContentSource = Evals::EvalJSONLFileContentSource - - module Evals - type 
eval_jsonl_file_content_source = - { - content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], - type: :file_content - } - - class EvalJSONLFileContentSource < OpenAI::Internal::Type::BaseModel - attr_accessor content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content] - - attr_accessor type: :file_content - - def initialize: ( - content: ::Array[OpenAI::Evals::EvalJSONLFileContentSource::Content], - ?type: :file_content - ) -> void - - type content = - { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } - - class Content < OpenAI::Internal::Type::BaseModel - attr_accessor item: ::Hash[Symbol, top] - - attr_reader sample: ::Hash[Symbol, top]? - - def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] - - def initialize: ( - item: ::Hash[Symbol, top], - ?sample: ::Hash[Symbol, top] - ) -> void - end - end - end - end -end diff --git a/sig/openai/models/evals/eval_jsonl_file_id_source.rbs b/sig/openai/models/evals/eval_jsonl_file_id_source.rbs deleted file mode 100644 index 611068c4..00000000 --- a/sig/openai/models/evals/eval_jsonl_file_id_source.rbs +++ /dev/null @@ -1,17 +0,0 @@ -module OpenAI - module Models - class EvalJSONLFileIDSource = Evals::EvalJSONLFileIDSource - - module Evals - type eval_jsonl_file_id_source = { id: String, type: :file_id } - - class EvalJSONLFileIDSource < OpenAI::Internal::Type::BaseModel - attr_accessor id: String - - attr_accessor type: :file_id - - def initialize: (id: String, ?type: :file_id) -> void - end - end - end -end diff --git a/sig/openai/models/evals/run_cancel_params.rbs b/sig/openai/models/evals/run_cancel_params.rbs index 32a0b270..19118357 100644 --- a/sig/openai/models/evals/run_cancel_params.rbs +++ b/sig/openai/models/evals/run_cancel_params.rbs @@ -14,6 +14,11 @@ module OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git 
a/sig/openai/models/evals/run_cancel_response.rbs b/sig/openai/models/evals/run_cancel_response.rbs index 6aa68c38..53408038 100644 --- a/sig/openai/models/evals/run_cancel_response.rbs +++ b/sig/openai/models/evals/run_cancel_response.rbs @@ -65,14 +65,404 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCancelResponse::data_source, + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCancelResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCancelResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages? 
+ + def input_messages=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams? + + def sampling_params=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + def to_hash: -> { + source: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::SamplingParams + } + + type source = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content 
+ + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + type template = + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | 
OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: 
:developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCancelResponse::data_source] end @@ -107,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -124,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -144,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/run_create_params.rbs b/sig/openai/models/evals/run_create_params.rbs index e52fc6de..7c36c313 100644 --- a/sig/openai/models/evals/run_create_params.rbs +++ b/sig/openai/models/evals/run_create_params.rbs @@ -28,14 +28,404 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + data_source: OpenAI::Models::Evals::RunCreateParams::data_source, + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource module DataSource extend OpenAI::Internal::Type::Union + type create_eval_responses_run_data_source = + { + source: 
OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + + class CreateEvalResponsesRunDataSource < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source + + attr_accessor type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_ + + attr_reader input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) -> OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + ?input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + ?model: String, + ?sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + ) -> void + + def to_hash: -> { + source: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_, + input_messages: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages, + model: String, + sampling_params: OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::SamplingParams + } + + type source = + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileID + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content] + + attr_accessor type: :file_content + + def 
initialize: ( + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::Source::FileContent::Content], + type: :file_content + } + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::source] + end + + type type_ = :responses + + module Type + extend OpenAI::Internal::Type::Enum + + RESPONSES: :responses + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::type_] + end + + type input_messages = + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> { + template: 
::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template], + type: :template + } + + type template = + OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::ChatMessage + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> 
::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::DataSource::CreateEvalResponsesRunDataSource::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateParams::data_source] end end diff --git a/sig/openai/models/evals/run_create_response.rbs b/sig/openai/models/evals/run_create_response.rbs index 8bea9b11..6076ce25 100644 --- a/sig/openai/models/evals/run_create_response.rbs +++ b/sig/openai/models/evals/run_create_response.rbs @@ -65,14 +65,404 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunCreateResponse::data_source, + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunCreateResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunCreateResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor 
source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams? + + def sampling_params=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + def to_hash: -> { + source: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::SamplingParams + } + + type source = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content 
+ } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? 
+ + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? + + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + type template = + 
OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: 
:developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunCreateResponse::data_source] end @@ -107,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -124,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -144,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/run_delete_params.rbs b/sig/openai/models/evals/run_delete_params.rbs index f775e4d5..097144da 100644 --- a/sig/openai/models/evals/run_delete_params.rbs +++ b/sig/openai/models/evals/run_delete_params.rbs @@ -14,6 +14,11 @@ module OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/evals/run_delete_response.rbs b/sig/openai/models/evals/run_delete_response.rbs index 21d23706..37a02bec 100644 --- a/sig/openai/models/evals/run_delete_response.rbs +++ b/sig/openai/models/evals/run_delete_response.rbs @@ -22,6 +22,8 @@ module OpenAI ?object: String, ?run_id: String ) -> void + + def to_hash: -> { deleted: bool, object: String, run_id: String } end end end diff --git a/sig/openai/models/evals/run_list_params.rbs 
b/sig/openai/models/evals/run_list_params.rbs index 33c002ca..95f65ca4 100644 --- a/sig/openai/models/evals/run_list_params.rbs +++ b/sig/openai/models/evals/run_list_params.rbs @@ -42,6 +42,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::Evals::RunListParams::order, + status: OpenAI::Models::Evals::RunListParams::status, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/evals/run_list_response.rbs b/sig/openai/models/evals/run_list_response.rbs index 4b372182..b87620d8 100644 --- a/sig/openai/models/evals/run_list_response.rbs +++ b/sig/openai/models/evals/run_list_response.rbs @@ -65,14 +65,404 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunListResponse::data_source, + error: OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunListResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunListResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunListResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: 
OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams? + + def sampling_params=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + def to_hash: -> { + source: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::SamplingParams + } + + type source = + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: 
::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + type content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor instructions_search: String? 
+ + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? + + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> { + template: 
::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + type template = + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def 
self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunListResponse::data_source] end @@ -107,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -124,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -144,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/run_retrieve_params.rbs b/sig/openai/models/evals/run_retrieve_params.rbs index 955259eb..bc35ad38 100644 --- a/sig/openai/models/evals/run_retrieve_params.rbs +++ b/sig/openai/models/evals/run_retrieve_params.rbs @@ -14,6 +14,11 @@ module OpenAI eval_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/evals/run_retrieve_response.rbs b/sig/openai/models/evals/run_retrieve_response.rbs index f6247ab4..97d98b20 100644 --- a/sig/openai/models/evals/run_retrieve_response.rbs +++ b/sig/openai/models/evals/run_retrieve_response.rbs @@ -65,14 +65,404 @@ module OpenAI ?object: :"eval.run" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + data_source: OpenAI::Models::Evals::RunRetrieveResponse::data_source, + error: 
OpenAI::Evals::EvalAPIError, + eval_id: String, + metadata: OpenAI::Models::metadata?, + model: String, + name: String, + object: :"eval.run", + per_model_usage: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerModelUsage], + per_testing_criteria_results: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::PerTestingCriteriaResult], + report_url: String, + result_counts: OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts, + status: String + } + type data_source = OpenAI::Evals::CreateEvalJSONLRunDataSource | OpenAI::Evals::CreateEvalCompletionsRunDataSource - | OpenAI::Evals::CreateEvalResponsesRunDataSource + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses module DataSource extend OpenAI::Internal::Type::Union + type responses = + { + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source + + attr_accessor type: :responses + + attr_reader input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages? + + def input_messages=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages + + attr_reader model: String? + + def model=: (String) -> String + + attr_reader sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams? 
+ + def sampling_params=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + + def initialize: ( + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source, + ?input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages, + ?model: String, + ?sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams, + ?type: :responses + ) -> void + + def to_hash: -> { + source: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source, + type: :responses, + input_messages: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages, + model: String, + sampling_params: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::SamplingParams + } + + type source = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileID + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::Responses + + module Source + extend OpenAI::Internal::Type::Union + + type file_content = + { + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + class FileContent < OpenAI::Internal::Type::BaseModel + attr_accessor content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content] + + attr_accessor type: :file_content + + def initialize: ( + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content], + ?type: :file_content + ) -> void + + def to_hash: -> { + content: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::Source::FileContent::Content], + type: :file_content + } + + type 
content = + { item: ::Hash[Symbol, top], sample: ::Hash[Symbol, top] } + + class Content < OpenAI::Internal::Type::BaseModel + attr_accessor item: ::Hash[Symbol, top] + + attr_reader sample: ::Hash[Symbol, top]? + + def sample=: (::Hash[Symbol, top]) -> ::Hash[Symbol, top] + + def initialize: ( + item: ::Hash[Symbol, top], + ?sample: ::Hash[Symbol, top] + ) -> void + + def to_hash: -> { + item: ::Hash[Symbol, top], + sample: ::Hash[Symbol, top] + } + end + end + + type file_id = { id: String, type: :file_id } + + class FileID < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor type: :file_id + + def initialize: (id: String, ?type: :file_id) -> void + + def to_hash: -> { id: String, type: :file_id } + end + + type responses = + { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + + class Responses < OpenAI::Internal::Type::BaseModel + attr_accessor type: :responses + + attr_accessor created_after: Integer? + + attr_accessor created_before: Integer? + + attr_accessor instructions_search: String? + + attr_accessor metadata: top? + + attr_accessor model: String? + + attr_accessor reasoning_effort: OpenAI::Models::reasoning_effort? + + attr_accessor temperature: Float? + + attr_accessor tools: ::Array[String]? + + attr_accessor top_p: Float? + + attr_accessor users: ::Array[String]? 
+ + def initialize: ( + ?created_after: Integer?, + ?created_before: Integer?, + ?instructions_search: String?, + ?metadata: top?, + ?model: String?, + ?reasoning_effort: OpenAI::Models::reasoning_effort?, + ?temperature: Float?, + ?tools: ::Array[String]?, + ?top_p: Float?, + ?users: ::Array[String]?, + ?type: :responses + ) -> void + + def to_hash: -> { + type: :responses, + created_after: Integer?, + created_before: Integer?, + instructions_search: String?, + metadata: top?, + model: String?, + reasoning_effort: OpenAI::Models::reasoning_effort?, + temperature: Float?, + tools: ::Array[String]?, + top_p: Float?, + users: ::Array[String]? + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::source] + end + + type input_messages = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::ItemReference + + module InputMessages + extend OpenAI::Internal::Type::Union + + type template = + { + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + class Template < OpenAI::Internal::Type::BaseModel + attr_accessor template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template] + + attr_accessor type: :template + + def initialize: ( + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template], + ?type: :template + ) -> void + + def to_hash: -> { + template: ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template], + type: :template + } + + type template = + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::ChatMessage + | 
OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem + + module Template + extend OpenAI::Internal::Type::Union + + type chat_message = { content: String, role: String } + + class ChatMessage < OpenAI::Internal::Type::BaseModel + attr_accessor content: String + + attr_accessor role: String + + def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } + end + + type eval_item = + { + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + class EvalItem < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content + + attr_accessor role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role + + attr_reader type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_? 
+ + def type=: ( + OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + + def initialize: ( + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + ?type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content, + role: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role, + type: OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: ( + text: String, + ?type: :output_text + ) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: 
:system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::Template::EvalItem::type_] + end + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::InputMessages::Template::template] + end + end + + type item_reference = + { item_reference: String, type: :item_reference } + + class ItemReference < OpenAI::Internal::Type::BaseModel + attr_accessor item_reference: String + + attr_accessor type: :item_reference + + def initialize: ( + item_reference: String, + ?type: :item_reference + ) -> void + + def to_hash: -> { + item_reference: String, + type: :item_reference + } + end + + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::DataSource::Responses::input_messages] + end + + type sampling_params = + { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + + class SamplingParams < OpenAI::Internal::Type::BaseModel + attr_reader max_completion_tokens: Integer? + + def max_completion_tokens=: (Integer) -> Integer + + attr_reader seed: Integer? + + def seed=: (Integer) -> Integer + + attr_reader temperature: Float? + + def temperature=: (Float) -> Float + + attr_reader top_p: Float? 
+ + def top_p=: (Float) -> Float + + def initialize: ( + ?max_completion_tokens: Integer, + ?seed: Integer, + ?temperature: Float, + ?top_p: Float + ) -> void + + def to_hash: -> { + max_completion_tokens: Integer, + seed: Integer, + temperature: Float, + top_p: Float + } + end + end + def self?.variants: -> ::Array[OpenAI::Models::Evals::RunRetrieveResponse::data_source] end @@ -107,6 +497,15 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + invocation_count: Integer, + model_name: String, + prompt_tokens: Integer, + total_tokens: Integer + } end type per_testing_criteria_result = @@ -124,6 +523,12 @@ module OpenAI passed: Integer, testing_criteria: String ) -> void + + def to_hash: -> { + failed: Integer, + passed: Integer, + testing_criteria: String + } end type result_counts = @@ -144,6 +549,13 @@ module OpenAI passed: Integer, total: Integer ) -> void + + def to_hash: -> { + errored: Integer, + failed: Integer, + passed: Integer, + total: Integer + } end end end diff --git a/sig/openai/models/evals/runs/output_item_list_params.rbs b/sig/openai/models/evals/runs/output_item_list_params.rbs index 4ff88c6f..650dddf9 100644 --- a/sig/openai/models/evals/runs/output_item_list_params.rbs +++ b/sig/openai/models/evals/runs/output_item_list_params.rbs @@ -47,6 +47,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + eval_id: String, + after: String, + limit: Integer, + order: OpenAI::Models::Evals::Runs::OutputItemListParams::order, + status: OpenAI::Models::Evals::Runs::OutputItemListParams::status, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/evals/runs/output_item_list_response.rbs b/sig/openai/models/evals/runs/output_item_list_response.rbs index 8f79060c..53dfbc98 100644 --- a/sig/openai/models/evals/runs/output_item_list_response.rbs +++ 
b/sig/openai/models/evals/runs/output_item_list_response.rbs @@ -50,6 +50,19 @@ module OpenAI ?object: :"eval.run.output_item" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + object: :"eval.run.output_item", + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample, + status: String + } + type sample = { error: OpenAI::Evals::EvalAPIError, @@ -98,6 +111,19 @@ module OpenAI usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage ) -> void + def to_hash: -> { + error: OpenAI::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage + } + type input = { content: String, role: String } class Input < OpenAI::Internal::Type::BaseModel @@ -106,6 +132,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type output = { content: String, role: String } @@ -120,6 +148,8 @@ module OpenAI def role=: (String) -> String def initialize: (?content: String, ?role: String) -> void + + def to_hash: -> { content: String, role: String } end type usage = @@ -145,6 +175,13 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/evals/runs/output_item_retrieve_params.rbs b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs index 91add94f..65456919 
100644 --- a/sig/openai/models/evals/runs/output_item_retrieve_params.rbs +++ b/sig/openai/models/evals/runs/output_item_retrieve_params.rbs @@ -19,6 +19,12 @@ module OpenAI run_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + eval_id: String, + run_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/evals/runs/output_item_retrieve_response.rbs b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs index 87d32fa3..70c37909 100644 --- a/sig/openai/models/evals/runs/output_item_retrieve_response.rbs +++ b/sig/openai/models/evals/runs/output_item_retrieve_response.rbs @@ -50,6 +50,19 @@ module OpenAI ?object: :"eval.run.output_item" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + datasource_item: ::Hash[Symbol, top], + datasource_item_id: Integer, + eval_id: String, + object: :"eval.run.output_item", + results: ::Array[::Hash[Symbol, top]], + run_id: String, + sample: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample, + status: String + } + type sample = { error: OpenAI::Evals::EvalAPIError, @@ -98,6 +111,19 @@ module OpenAI usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage ) -> void + def to_hash: -> { + error: OpenAI::Evals::EvalAPIError, + finish_reason: String, + input: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Input], + max_completion_tokens: Integer, + model: String, + output: ::Array[OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Output], + seed: Integer, + temperature: Float, + top_p: Float, + usage: OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage + } + type input = { content: String, role: String } class Input < OpenAI::Internal::Type::BaseModel @@ -106,6 +132,8 @@ module OpenAI attr_accessor role: String def initialize: (content: String, role: String) -> void + + def to_hash: -> { content: String, role: String } end type output = { 
content: String, role: String } @@ -120,6 +148,8 @@ module OpenAI def role=: (String) -> String def initialize: (?content: String, ?role: String) -> void + + def to_hash: -> { content: String, role: String } end type usage = @@ -145,6 +175,13 @@ module OpenAI prompt_tokens: Integer, total_tokens: Integer ) -> void + + def to_hash: -> { + cached_tokens: Integer, + completion_tokens: Integer, + prompt_tokens: Integer, + total_tokens: Integer + } end end end diff --git a/sig/openai/models/file_content_params.rbs b/sig/openai/models/file_content_params.rbs index df1a12a0..c36aedab 100644 --- a/sig/openai/models/file_content_params.rbs +++ b/sig/openai/models/file_content_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/file_create_params.rbs b/sig/openai/models/file_create_params.rbs index 54eaab64..2abf1615 100644 --- a/sig/openai/models/file_create_params.rbs +++ b/sig/openai/models/file_create_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type file_create_params = { - file: (Pathname | StringIO | IO | OpenAI::FilePart), + file: OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose } & OpenAI::Internal::Type::request_parameters @@ -11,15 +11,21 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor file: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor file: OpenAI::Internal::file_input attr_accessor purpose: OpenAI::Models::file_purpose def initialize: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + file: OpenAI::Internal::file_input, + purpose: OpenAI::Models::file_purpose, + 
request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/file_delete_params.rbs b/sig/openai/models/file_delete_params.rbs index 8cd08d59..3c3ea094 100644 --- a/sig/openai/models/file_delete_params.rbs +++ b/sig/openai/models/file_delete_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/file_deleted.rbs b/sig/openai/models/file_deleted.rbs index fd681d7c..c091a995 100644 --- a/sig/openai/models/file_deleted.rbs +++ b/sig/openai/models/file_deleted.rbs @@ -10,6 +10,8 @@ module OpenAI attr_accessor object: :file def initialize: (id: String, deleted: bool, ?object: :file) -> void + + def to_hash: -> { id: String, deleted: bool, object: :file } end end end diff --git a/sig/openai/models/file_list_params.rbs b/sig/openai/models/file_list_params.rbs index 51c07c76..2d459f2d 100644 --- a/sig/openai/models/file_list_params.rbs +++ b/sig/openai/models/file_list_params.rbs @@ -39,6 +39,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::FileListParams::order, + purpose: String, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/file_object.rbs b/sig/openai/models/file_object.rbs index 3ebcb910..acbdedc2 100644 --- a/sig/openai/models/file_object.rbs +++ b/sig/openai/models/file_object.rbs @@ -7,8 +7,8 @@ module OpenAI created_at: Integer, filename: String, object: :file, - purpose: OpenAI::FileObject::purpose, - status: OpenAI::FileObject::status, + purpose: OpenAI::Models::FileObject::purpose, + status: OpenAI::Models::FileObject::status, expires_at: Integer, status_details: String } @@ -24,9 +24,9 @@ module OpenAI attr_accessor object: :file - attr_accessor purpose: 
OpenAI::FileObject::purpose + attr_accessor purpose: OpenAI::Models::FileObject::purpose - attr_accessor status: OpenAI::FileObject::status + attr_accessor status: OpenAI::Models::FileObject::status attr_reader expires_at: Integer? @@ -41,13 +41,25 @@ module OpenAI bytes: Integer, created_at: Integer, filename: String, - purpose: OpenAI::FileObject::purpose, - status: OpenAI::FileObject::status, + purpose: OpenAI::Models::FileObject::purpose, + status: OpenAI::Models::FileObject::status, ?expires_at: Integer, ?status_details: String, ?object: :file ) -> void + def to_hash: -> { + id: String, + bytes: Integer, + created_at: Integer, + filename: String, + object: :file, + purpose: OpenAI::Models::FileObject::purpose, + status: OpenAI::Models::FileObject::status, + expires_at: Integer, + status_details: String + } + type purpose = :assistants | :assistants_output @@ -68,7 +80,7 @@ module OpenAI FINE_TUNE_RESULTS: :"fine-tune-results" VISION: :vision - def self?.values: -> ::Array[OpenAI::FileObject::purpose] + def self?.values: -> ::Array[OpenAI::Models::FileObject::purpose] end type status = :uploaded | :processed | :error @@ -80,7 +92,7 @@ module OpenAI PROCESSED: :processed ERROR: :error - def self?.values: -> ::Array[OpenAI::FileObject::status] + def self?.values: -> ::Array[OpenAI::Models::FileObject::status] end end end diff --git a/sig/openai/models/file_retrieve_params.rbs b/sig/openai/models/file_retrieve_params.rbs index 4788e5a6..01eca336 100644 --- a/sig/openai/models/file_retrieve_params.rbs +++ b/sig/openai/models/file_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs index 4c094ab8..0fcec2cc 100644 --- 
a/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_run_params.rbs @@ -27,6 +27,13 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + grader: OpenAI::Models::FineTuning::Alpha::GraderRunParams::grader, + model_sample: String, + reference_answer: OpenAI::Models::FineTuning::Alpha::GraderRunParams::reference_answer, + request_options: OpenAI::RequestOptions + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader diff --git a/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs b/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs index fdc022c0..e54eb087 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_run_response.rbs @@ -26,6 +26,13 @@ module OpenAI sub_rewards: ::Hash[Symbol, top] ) -> void + def to_hash: -> { + metadata: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata, + model_grader_token_usage_per_model: ::Hash[Symbol, top], + reward: Float, + sub_rewards: ::Hash[Symbol, top] + } + type metadata = { errors: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors, @@ -62,6 +69,16 @@ module OpenAI type: String ) -> void + def to_hash: -> { + errors: OpenAI::Models::FineTuning::Alpha::GraderRunResponse::Metadata::Errors, + execution_time: Float, + name: String, + sampled_model_name: String?, + scores: ::Hash[Symbol, top], + token_usage: Integer?, + type: String + } + type errors = { formula_parse_error: bool, @@ -125,6 +142,23 @@ module OpenAI truncated_observation_error: bool, unresponsive_reward_error: bool ) -> void + + def to_hash: -> { + formula_parse_error: bool, + invalid_variable_error: bool, + model_grader_parse_error: bool, + model_grader_refusal_error: bool, + model_grader_server_error: bool, + model_grader_server_error_details: String?, + other_error: bool, + python_grader_runtime_error: bool, + 
python_grader_runtime_error_details: String?, + python_grader_server_error: bool, + python_grader_server_error_type: String?, + sample_parse_error: bool, + truncated_observation_error: bool, + unresponsive_reward_error: bool + } end end end diff --git a/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs b/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs index 8eec588b..c7e0f385 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_validate_params.rbs @@ -19,6 +19,11 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateParams::grader, + request_options: OpenAI::RequestOptions + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader diff --git a/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs b/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs index a2139b77..939e54e2 100644 --- a/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs +++ b/sig/openai/models/fine_tuning/alpha/grader_validate_response.rbs @@ -18,6 +18,10 @@ module OpenAI ?grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader ) -> void + def to_hash: -> { + grader: OpenAI::Models::FineTuning::Alpha::GraderValidateResponse::grader + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs index d8a52d89..b74273b6 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_params.rbs @@ -16,6 +16,11 @@ module OpenAI project_ids: ::Array[String], ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + project_ids: 
::Array[String], + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs index 1bb85f1a..9e54b6cd 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_create_response.rbs @@ -25,6 +25,13 @@ module OpenAI project_id: String, ?object: :"checkpoint.permission" ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs index 78bbd44d..2d265756 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_params.rbs @@ -16,6 +16,11 @@ module OpenAI fine_tuned_model_checkpoint: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + fine_tuned_model_checkpoint: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs index 7cf0427b..c2d55981 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_delete_response.rbs @@ -17,6 +17,12 @@ module OpenAI deleted: bool, ?object: :"checkpoint.permission" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"checkpoint.permission" + } end end end diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs index 0b609dbc..a76caaa1 100644 --- 
a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbs @@ -41,6 +41,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + limit: Integer, + order: OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::order, + project_id: String, + request_options: OpenAI::RequestOptions + } + type order = :ascending | :descending module Order diff --git a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs index db9a1b9d..54f9630a 100644 --- a/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs +++ b/sig/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rbs @@ -30,6 +30,14 @@ module OpenAI ?object: :list ) -> void + def to_hash: -> { + data: ::Array[OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveResponse::Data], + has_more: bool, + object: :list, + first_id: String?, + last_id: String? 
+ } + type data = { id: String, @@ -53,6 +61,13 @@ module OpenAI project_id: String, ?object: :"checkpoint.permission" ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + object: :"checkpoint.permission", + project_id: String + } end end end diff --git a/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs b/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs index 49aa63fd..0abf70d0 100644 --- a/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs +++ b/sig/openai/models/fine_tuning/dpo_hyperparameters.rbs @@ -3,50 +3,57 @@ module OpenAI module FineTuning type dpo_hyperparameters = { - batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size, - beta: OpenAI::FineTuning::DpoHyperparameters::beta, - learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs + batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size, + beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta, + learning_rate_multiplier: OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs } class DpoHyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size? + attr_reader batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size? def batch_size=: ( - OpenAI::FineTuning::DpoHyperparameters::batch_size - ) -> OpenAI::FineTuning::DpoHyperparameters::batch_size + OpenAI::Models::FineTuning::DpoHyperparameters::batch_size + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::batch_size - attr_reader beta: OpenAI::FineTuning::DpoHyperparameters::beta? + attr_reader beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta? 
def beta=: ( - OpenAI::FineTuning::DpoHyperparameters::beta - ) -> OpenAI::FineTuning::DpoHyperparameters::beta + OpenAI::Models::FineTuning::DpoHyperparameters::beta + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::beta - attr_reader learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs? def n_epochs=: ( - OpenAI::FineTuning::DpoHyperparameters::n_epochs - ) -> OpenAI::FineTuning::DpoHyperparameters::n_epochs + OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::DpoHyperparameters::batch_size, - ?beta: OpenAI::FineTuning::DpoHyperparameters::beta, - ?learning_rate_multiplier: OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::DpoHyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size, + ?beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::DpoHyperparameters::batch_size, + beta: OpenAI::Models::FineTuning::DpoHyperparameters::beta, + learning_rate_multiplier: 
OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs + } + type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::batch_size] end type beta = :auto | Float @@ -54,7 +61,7 @@ module OpenAI module Beta extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::beta] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::beta] end type learning_rate_multiplier = :auto | Float @@ -62,7 +69,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -70,7 +77,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::DpoHyperparameters::n_epochs] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::DpoHyperparameters::n_epochs] end end end diff --git a/sig/openai/models/fine_tuning/dpo_method.rbs b/sig/openai/models/fine_tuning/dpo_method.rbs index 4284a080..094cebbf 100644 --- a/sig/openai/models/fine_tuning/dpo_method.rbs +++ b/sig/openai/models/fine_tuning/dpo_method.rbs @@ -14,6 +14,10 @@ module OpenAI def initialize: ( ?hyperparameters: OpenAI::FineTuning::DpoHyperparameters ) -> void + + def to_hash: -> { + hyperparameters: OpenAI::FineTuning::DpoHyperparameters + } end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job.rbs b/sig/openai/models/fine_tuning/fine_tuning_job.rbs index 59841d1e..a5368cef 100644 --- 
a/sig/openai/models/fine_tuning/fine_tuning_job.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job.rbs @@ -16,7 +16,7 @@ module OpenAI organization_id: String, result_files: ::Array[String], seed: Integer, - status: OpenAI::FineTuning::FineTuningJob::status, + status: OpenAI::Models::FineTuning::FineTuningJob::status, trained_tokens: Integer?, training_file: String, validation_file: String?, @@ -49,7 +49,7 @@ module OpenAI attr_accessor seed: Integer - attr_accessor status: OpenAI::FineTuning::FineTuningJob::status + attr_accessor status: OpenAI::Models::FineTuning::FineTuningJob::status attr_accessor trained_tokens: Integer? @@ -80,7 +80,7 @@ module OpenAI organization_id: String, result_files: ::Array[String], seed: Integer, - status: OpenAI::FineTuning::FineTuningJob::status, + status: OpenAI::Models::FineTuning::FineTuningJob::status, trained_tokens: Integer?, training_file: String, validation_file: String?, @@ -91,6 +91,28 @@ module OpenAI ?object: :"fine_tuning.job" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + error: OpenAI::FineTuning::FineTuningJob::Error?, + fine_tuned_model: String?, + finished_at: Integer?, + hyperparameters: OpenAI::FineTuning::FineTuningJob::Hyperparameters, + model: String, + object: :"fine_tuning.job", + organization_id: String, + result_files: ::Array[String], + seed: Integer, + status: OpenAI::Models::FineTuning::FineTuningJob::status, + trained_tokens: Integer?, + training_file: String, + validation_file: String?, + estimated_finish: Integer?, + integrations: ::Array[OpenAI::FineTuning::FineTuningJobWandbIntegrationObject]?, + metadata: OpenAI::Models::metadata?, + method_: OpenAI::FineTuning::FineTuningJob::Method + } + type error = { code: String, message: String, param: String? } class Error < OpenAI::Internal::Type::BaseModel @@ -105,42 +127,50 @@ module OpenAI message: String, param: String? ) -> void + + def to_hash: -> { code: String, message: String, param: String? 
} end type hyperparameters = { - batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size?, - learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs + batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size?, + learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_accessor batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size? + attr_accessor batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size? - attr_reader learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs + OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size?, - ?learning_rate_multiplier: OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size?, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size?, + learning_rate_multiplier: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs + } + type batch_size = (top | :auto | Integer)? 
module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -148,7 +178,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -156,7 +186,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::FineTuningJob::Hyperparameters::n_epochs] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::n_epochs] end end @@ -178,19 +208,19 @@ module OpenAI FAILED: :failed CANCELLED: :cancelled - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJob::status] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::status] end type method_ = { - type: OpenAI::FineTuning::FineTuningJob::Method::type_, + type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_, dpo: OpenAI::FineTuning::DpoMethod, reinforcement: OpenAI::FineTuning::ReinforcementMethod, supervised: OpenAI::FineTuning::SupervisedMethod } class Method < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::FineTuning::FineTuningJob::Method::type_ + attr_accessor type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_ attr_reader dpo: OpenAI::FineTuning::DpoMethod? 
@@ -211,12 +241,19 @@ module OpenAI ) -> OpenAI::FineTuning::SupervisedMethod def initialize: ( - type: OpenAI::FineTuning::FineTuningJob::Method::type_, + type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_, ?dpo: OpenAI::FineTuning::DpoMethod, ?reinforcement: OpenAI::FineTuning::ReinforcementMethod, ?supervised: OpenAI::FineTuning::SupervisedMethod ) -> void + def to_hash: -> { + type: OpenAI::Models::FineTuning::FineTuningJob::Method::type_, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod + } + type type_ = :supervised | :dpo | :reinforcement module Type @@ -226,7 +263,7 @@ module OpenAI DPO: :dpo REINFORCEMENT: :reinforcement - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJob::Method::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJob::Method::type_] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs index 018e7195..b1c4f9c4 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_event.rbs @@ -7,11 +7,11 @@ module OpenAI { id: String, created_at: Integer, - level: OpenAI::FineTuning::FineTuningJobEvent::level, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, message: String, object: :"fine_tuning.job.event", data: top, - type: OpenAI::FineTuning::FineTuningJobEvent::type_ + type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_ } class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel @@ -19,7 +19,7 @@ module OpenAI attr_accessor created_at: Integer - attr_accessor level: OpenAI::FineTuning::FineTuningJobEvent::level + attr_accessor level: OpenAI::Models::FineTuning::FineTuningJobEvent::level attr_accessor message: String @@ -29,22 +29,32 @@ module OpenAI def data=: (top) -> top - attr_reader type: 
OpenAI::FineTuning::FineTuningJobEvent::type_? + attr_reader type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_? def type=: ( - OpenAI::FineTuning::FineTuningJobEvent::type_ - ) -> OpenAI::FineTuning::FineTuningJobEvent::type_ + OpenAI::Models::FineTuning::FineTuningJobEvent::type_ + ) -> OpenAI::Models::FineTuning::FineTuningJobEvent::type_ def initialize: ( id: String, created_at: Integer, - level: OpenAI::FineTuning::FineTuningJobEvent::level, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, message: String, ?data: top, - ?type: OpenAI::FineTuning::FineTuningJobEvent::type_, + ?type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_, ?object: :"fine_tuning.job.event" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + level: OpenAI::Models::FineTuning::FineTuningJobEvent::level, + message: String, + object: :"fine_tuning.job.event", + data: top, + type: OpenAI::Models::FineTuning::FineTuningJobEvent::type_ + } + type level = :info | :warn | :error module Level @@ -54,7 +64,7 @@ module OpenAI WARN: :warn ERROR: :error - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJobEvent::level] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::level] end type type_ = :message | :metrics @@ -65,7 +75,7 @@ module OpenAI MESSAGE: :message METRICS: :metrics - def self?.values: -> ::Array[OpenAI::FineTuning::FineTuningJobEvent::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::FineTuningJobEvent::type_] end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs index a1e46c1a..1af84b2b 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rbs @@ -28,6 +28,13 @@ module OpenAI ?name: String?, ?tags: ::Array[String] ) -> void + + def to_hash: -> { + project: String, + entity: String?, 
+ name: String?, + tags: ::Array[String] + } end end end diff --git a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs index cef52fc6..aeb6caad 100644 --- a/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs +++ b/sig/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rbs @@ -18,6 +18,11 @@ module OpenAI wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration, ?type: :wandb ) -> void + + def to_hash: -> { + type: :wandb, + wandb: OpenAI::FineTuning::FineTuningJobWandbIntegration + } end end end diff --git a/sig/openai/models/fine_tuning/job_cancel_params.rbs b/sig/openai/models/fine_tuning/job_cancel_params.rbs index bba98884..2548b37f 100644 --- a/sig/openai/models/fine_tuning/job_cancel_params.rbs +++ b/sig/openai/models/fine_tuning/job_cancel_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/job_create_params.rbs b/sig/openai/models/fine_tuning/job_create_params.rbs index 37e6178d..f1f03e19 100644 --- a/sig/openai/models/fine_tuning/job_create_params.rbs +++ b/sig/openai/models/fine_tuning/job_create_params.rbs @@ -58,6 +58,19 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + model: OpenAI::Models::FineTuning::JobCreateParams::model, + training_file: String, + hyperparameters: OpenAI::FineTuning::JobCreateParams::Hyperparameters, + integrations: ::Array[OpenAI::FineTuning::JobCreateParams::Integration]?, + metadata: OpenAI::Models::metadata?, + method_: OpenAI::FineTuning::JobCreateParams::Method, + seed: Integer?, + suffix: String?, + validation_file: String?, + request_options: OpenAI::RequestOptions + } + type model = String | :"babbage-002" @@ -78,42 +91,48 
@@ module OpenAI type hyperparameters = { - batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size, - learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs + batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs } class Hyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size? + attr_reader batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size? def batch_size=: ( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size - ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size + OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size + ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size - attr_reader learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs - ) -> OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs + OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size, + learning_rate_multiplier: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs + } + type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -121,7 +140,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -129,7 +148,7 @@ module OpenAI module NEpochs extend 
OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::JobCreateParams::Hyperparameters::n_epochs] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::n_epochs] end end @@ -149,6 +168,11 @@ module OpenAI ?type: :wandb ) -> void + def to_hash: -> { + type: :wandb, + wandb: OpenAI::FineTuning::JobCreateParams::Integration::Wandb + } + type wandb = { project: String, @@ -174,19 +198,26 @@ module OpenAI ?name: String?, ?tags: ::Array[String] ) -> void + + def to_hash: -> { + project: String, + entity: String?, + name: String?, + tags: ::Array[String] + } end end type method_ = { - type: OpenAI::FineTuning::JobCreateParams::Method::type_, + type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_, dpo: OpenAI::FineTuning::DpoMethod, reinforcement: OpenAI::FineTuning::ReinforcementMethod, supervised: OpenAI::FineTuning::SupervisedMethod } class Method < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::FineTuning::JobCreateParams::Method::type_ + attr_accessor type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_ attr_reader dpo: OpenAI::FineTuning::DpoMethod? 
@@ -207,12 +238,19 @@ module OpenAI ) -> OpenAI::FineTuning::SupervisedMethod def initialize: ( - type: OpenAI::FineTuning::JobCreateParams::Method::type_, + type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_, ?dpo: OpenAI::FineTuning::DpoMethod, ?reinforcement: OpenAI::FineTuning::ReinforcementMethod, ?supervised: OpenAI::FineTuning::SupervisedMethod ) -> void + def to_hash: -> { + type: OpenAI::Models::FineTuning::JobCreateParams::Method::type_, + dpo: OpenAI::FineTuning::DpoMethod, + reinforcement: OpenAI::FineTuning::ReinforcementMethod, + supervised: OpenAI::FineTuning::SupervisedMethod + } + type type_ = :supervised | :dpo | :reinforcement module Type @@ -222,7 +260,7 @@ module OpenAI DPO: :dpo REINFORCEMENT: :reinforcement - def self?.values: -> ::Array[OpenAI::FineTuning::JobCreateParams::Method::type_] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::JobCreateParams::Method::type_] end end end diff --git a/sig/openai/models/fine_tuning/job_list_events_params.rbs b/sig/openai/models/fine_tuning/job_list_events_params.rbs index f3d26bd2..42a77bac 100644 --- a/sig/openai/models/fine_tuning/job_list_events_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_events_params.rbs @@ -22,6 +22,12 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/fine_tuning/job_list_params.rbs b/sig/openai/models/fine_tuning/job_list_params.rbs index db301888..d45893f4 100644 --- a/sig/openai/models/fine_tuning/job_list_params.rbs +++ b/sig/openai/models/fine_tuning/job_list_params.rbs @@ -25,6 +25,13 @@ module OpenAI ?metadata: ::Hash[Symbol, String]?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + metadata: ::Hash[Symbol, String]?, + request_options: OpenAI::RequestOptions + } end end end diff --git 
a/sig/openai/models/fine_tuning/job_pause_params.rbs b/sig/openai/models/fine_tuning/job_pause_params.rbs index a5ac0d51..3b55e229 100644 --- a/sig/openai/models/fine_tuning/job_pause_params.rbs +++ b/sig/openai/models/fine_tuning/job_pause_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/job_resume_params.rbs b/sig/openai/models/fine_tuning/job_resume_params.rbs index fa50a15b..3a1d7da9 100644 --- a/sig/openai/models/fine_tuning/job_resume_params.rbs +++ b/sig/openai/models/fine_tuning/job_resume_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/job_retrieve_params.rbs b/sig/openai/models/fine_tuning/job_retrieve_params.rbs index ed195b91..1535b93a 100644 --- a/sig/openai/models/fine_tuning/job_retrieve_params.rbs +++ b/sig/openai/models/fine_tuning/job_retrieve_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs index 2cf57298..28b14de0 100644 --- a/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs +++ b/sig/openai/models/fine_tuning/jobs/checkpoint_list_params.rbs @@ -23,6 +23,12 @@ module OpenAI ?limit: Integer, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + after: String, + limit: Integer, + request_options: OpenAI::RequestOptions + } end end end diff --git 
a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs index 51e7737c..15da3658 100644 --- a/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs +++ b/sig/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rbs @@ -38,6 +38,16 @@ module OpenAI ?object: :"fine_tuning.job.checkpoint" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + fine_tuned_model_checkpoint: String, + fine_tuning_job_id: String, + metrics: OpenAI::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics, + object: :"fine_tuning.job.checkpoint", + step_number: Integer + } + type metrics = { full_valid_loss: Float, @@ -87,6 +97,16 @@ module OpenAI ?valid_loss: Float, ?valid_mean_token_accuracy: Float ) -> void + + def to_hash: -> { + full_valid_loss: Float, + full_valid_mean_token_accuracy: Float, + step: Float, + train_loss: Float, + train_mean_token_accuracy: Float, + valid_loss: Float, + valid_mean_token_accuracy: Float + } end end end diff --git a/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs b/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs index 64891385..fdd3cc5b 100644 --- a/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs +++ b/sig/openai/models/fine_tuning/reinforcement_hyperparameters.rbs @@ -3,74 +3,84 @@ module OpenAI module FineTuning type reinforcement_hyperparameters = { - batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size, - compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier, - eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval, - eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples, - learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs, - reasoning_effort: 
OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size, + compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier, + eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval, + eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples, + learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs, + reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort } class ReinforcementHyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size? + attr_reader batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size? def batch_size=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::batch_size - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::batch_size + OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size - attr_reader compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier? + attr_reader compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier? def compute_multiplier=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier + OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier - attr_reader eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval? 
+ attr_reader eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval? def eval_interval=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval + OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval - attr_reader eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples? + attr_reader eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples? def eval_samples=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples + OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples - attr_reader learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs + OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs - attr_reader reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort? + attr_reader reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort? def reasoning_effort=: ( - OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort - ) -> OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort + ) -> OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort def initialize: ( - ?batch_size: OpenAI::FineTuning::ReinforcementHyperparameters::batch_size, - ?compute_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier, - ?eval_interval: OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval, - ?eval_samples: OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples, - ?learning_rate_multiplier: OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs, - ?reasoning_effort: OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort + ?batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size, + ?compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier, + ?eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval, + ?eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs, 
+ ?reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size, + compute_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier, + eval_interval: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval, + eval_samples: OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples, + learning_rate_multiplier: OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs, + reasoning_effort: OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort + } + type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::batch_size] end type compute_multiplier = :auto | Float @@ -78,7 +88,7 @@ module OpenAI module ComputeMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::compute_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::compute_multiplier] end type eval_interval = :auto | Integer @@ -86,7 +96,7 @@ module OpenAI module EvalInterval extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::eval_interval] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_interval] end type eval_samples = :auto | Integer @@ -94,7 +104,7 @@ module OpenAI module EvalSamples extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::eval_samples] + def 
self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::eval_samples] end type learning_rate_multiplier = :auto | Float @@ -102,7 +112,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -110,7 +120,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::n_epochs] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::n_epochs] end type reasoning_effort = :default | :low | :medium | :high @@ -123,7 +133,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self?.values: -> ::Array[OpenAI::FineTuning::ReinforcementHyperparameters::reasoning_effort] + def self?.values: -> ::Array[OpenAI::Models::FineTuning::ReinforcementHyperparameters::reasoning_effort] end end end diff --git a/sig/openai/models/fine_tuning/reinforcement_method.rbs b/sig/openai/models/fine_tuning/reinforcement_method.rbs index 4205559a..3f637b95 100644 --- a/sig/openai/models/fine_tuning/reinforcement_method.rbs +++ b/sig/openai/models/fine_tuning/reinforcement_method.rbs @@ -3,12 +3,12 @@ module OpenAI module FineTuning type reinforcement_method = { - grader: OpenAI::FineTuning::ReinforcementMethod::grader, + grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader, hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters } class ReinforcementMethod < OpenAI::Internal::Type::BaseModel - attr_accessor grader: OpenAI::FineTuning::ReinforcementMethod::grader + attr_accessor grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader attr_reader hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters? 
@@ -17,10 +17,15 @@ module OpenAI ) -> OpenAI::FineTuning::ReinforcementHyperparameters def initialize: ( - grader: OpenAI::FineTuning::ReinforcementMethod::grader, + grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader, ?hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters ) -> void + def to_hash: -> { + grader: OpenAI::Models::FineTuning::ReinforcementMethod::grader, + hyperparameters: OpenAI::FineTuning::ReinforcementHyperparameters + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader @@ -31,7 +36,7 @@ module OpenAI module Grader extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::ReinforcementMethod::grader] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::ReinforcementMethod::grader] end end end diff --git a/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs b/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs index 7ca1ee8f..ce01622a 100644 --- a/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs +++ b/sig/openai/models/fine_tuning/supervised_hyperparameters.rbs @@ -3,42 +3,48 @@ module OpenAI module FineTuning type supervised_hyperparameters = { - batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size, - learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier, - n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs + batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size, + learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs } class SupervisedHyperparameters < OpenAI::Internal::Type::BaseModel - attr_reader batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size? + attr_reader batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size? 
def batch_size=: ( - OpenAI::FineTuning::SupervisedHyperparameters::batch_size - ) -> OpenAI::FineTuning::SupervisedHyperparameters::batch_size + OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size + ) -> OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size - attr_reader learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier? + attr_reader learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier? def learning_rate_multiplier=: ( - OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier - ) -> OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier + OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier + ) -> OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier - attr_reader n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs? + attr_reader n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs? 
def n_epochs=: ( - OpenAI::FineTuning::SupervisedHyperparameters::n_epochs - ) -> OpenAI::FineTuning::SupervisedHyperparameters::n_epochs + OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs + ) -> OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs def initialize: ( - ?batch_size: OpenAI::FineTuning::SupervisedHyperparameters::batch_size, - ?learning_rate_multiplier: OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier, - ?n_epochs: OpenAI::FineTuning::SupervisedHyperparameters::n_epochs + ?batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size, + ?learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier, + ?n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs ) -> void + def to_hash: -> { + batch_size: OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size, + learning_rate_multiplier: OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier, + n_epochs: OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs + } + type batch_size = :auto | Integer module BatchSize extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::batch_size] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::SupervisedHyperparameters::batch_size] end type learning_rate_multiplier = :auto | Float @@ -46,7 +52,7 @@ module OpenAI module LearningRateMultiplier extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::learning_rate_multiplier] + def self?.variants: -> ::Array[OpenAI::Models::FineTuning::SupervisedHyperparameters::learning_rate_multiplier] end type n_epochs = :auto | Integer @@ -54,7 +60,7 @@ module OpenAI module NEpochs extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::FineTuning::SupervisedHyperparameters::n_epochs] + def self?.variants: -> 
::Array[OpenAI::Models::FineTuning::SupervisedHyperparameters::n_epochs] end end end diff --git a/sig/openai/models/fine_tuning/supervised_method.rbs b/sig/openai/models/fine_tuning/supervised_method.rbs index eccaf9d3..dbebf97e 100644 --- a/sig/openai/models/fine_tuning/supervised_method.rbs +++ b/sig/openai/models/fine_tuning/supervised_method.rbs @@ -14,6 +14,10 @@ module OpenAI def initialize: ( ?hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters ) -> void + + def to_hash: -> { + hyperparameters: OpenAI::FineTuning::SupervisedHyperparameters + } end end end diff --git a/sig/openai/models/function_definition.rbs b/sig/openai/models/function_definition.rbs index 40bf94b9..7a9696f3 100644 --- a/sig/openai/models/function_definition.rbs +++ b/sig/openai/models/function_definition.rbs @@ -29,6 +29,13 @@ module OpenAI ?parameters: OpenAI::Models::function_parameters, ?strict: bool? ) -> void + + def to_hash: -> { + name: String, + description: String, + parameters: OpenAI::Models::function_parameters, + strict: bool? 
+ } end end end diff --git a/sig/openai/models/graders/label_model_grader.rbs b/sig/openai/models/graders/label_model_grader.rbs index 381664df..e6970199 100644 --- a/sig/openai/models/graders/label_model_grader.rbs +++ b/sig/openai/models/graders/label_model_grader.rbs @@ -5,7 +5,7 @@ module OpenAI module Graders type label_model_grader = { - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::LabelModelGrader::Input], labels: ::Array[String], model: String, name: String, @@ -14,7 +14,7 @@ module OpenAI } class LabelModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalItem] + attr_accessor input: ::Array[OpenAI::Graders::LabelModelGrader::Input] attr_accessor labels: ::Array[String] @@ -27,13 +27,99 @@ module OpenAI attr_accessor type: :label_model def initialize: ( - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::LabelModelGrader::Input], labels: ::Array[String], model: String, name: String, passing_labels: ::Array[String], ?type: :label_model ) -> void + + def to_hash: -> { + input: ::Array[OpenAI::Graders::LabelModelGrader::Input], + labels: ::Array[String], + model: String, + name: String, + passing_labels: ::Array[String], + type: :label_model + } + + type input = + { + content: OpenAI::Models::Graders::LabelModelGrader::Input::content, + role: OpenAI::Models::Graders::LabelModelGrader::Input::role, + type: OpenAI::Models::Graders::LabelModelGrader::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Graders::LabelModelGrader::Input::content + + attr_accessor role: OpenAI::Models::Graders::LabelModelGrader::Input::role + + attr_reader type: OpenAI::Models::Graders::LabelModelGrader::Input::type_? 
+ + def type=: ( + OpenAI::Models::Graders::LabelModelGrader::Input::type_ + ) -> OpenAI::Models::Graders::LabelModelGrader::Input::type_ + + def initialize: ( + content: OpenAI::Models::Graders::LabelModelGrader::Input::content, + role: OpenAI::Models::Graders::LabelModelGrader::Input::role, + ?type: OpenAI::Models::Graders::LabelModelGrader::Input::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Graders::LabelModelGrader::Input::content, + role: OpenAI::Models::Graders::LabelModelGrader::Input::role, + type: OpenAI::Models::Graders::LabelModelGrader::Input::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Graders::LabelModelGrader::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Graders::LabelModelGrader::Input::type_] + end + end end end end diff --git a/sig/openai/models/graders/multi_grader.rbs b/sig/openai/models/graders/multi_grader.rbs index 295e5177..206d8144 100644 --- a/sig/openai/models/graders/multi_grader.rbs +++ b/sig/openai/models/graders/multi_grader.rbs @@ -6,7 +6,7 @@ module OpenAI type multi_grader = { 
calculate_output: String, - graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader], + graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader], name: String, type: :multi } @@ -14,7 +14,7 @@ module OpenAI class MultiGrader < OpenAI::Internal::Type::BaseModel attr_accessor calculate_output: String - attr_accessor graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader] + attr_accessor graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader] attr_accessor name: String @@ -22,11 +22,18 @@ module OpenAI def initialize: ( calculate_output: String, - graders: ::Hash[Symbol, OpenAI::Graders::MultiGrader::grader], + graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader], name: String, ?type: :multi ) -> void + def to_hash: -> { + calculate_output: String, + graders: ::Hash[Symbol, OpenAI::Models::Graders::MultiGrader::grader], + name: String, + type: :multi + } + type grader = OpenAI::Graders::StringCheckGrader | OpenAI::Graders::TextSimilarityGrader @@ -37,7 +44,7 @@ module OpenAI module Grader extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Graders::MultiGrader::grader] + def self?.variants: -> ::Array[OpenAI::Models::Graders::MultiGrader::grader] end end end diff --git a/sig/openai/models/graders/python_grader.rbs b/sig/openai/models/graders/python_grader.rbs index e23c7c85..fdd1b7ac 100644 --- a/sig/openai/models/graders/python_grader.rbs +++ b/sig/openai/models/graders/python_grader.rbs @@ -23,6 +23,13 @@ module OpenAI ?image_tag: String, ?type: :python ) -> void + + def to_hash: -> { + name: String, + source: String, + type: :python, + image_tag: String + } end end end diff --git a/sig/openai/models/graders/score_model_grader.rbs b/sig/openai/models/graders/score_model_grader.rbs index 7783564c..caf20048 100644 --- a/sig/openai/models/graders/score_model_grader.rbs +++ b/sig/openai/models/graders/score_model_grader.rbs @@ -5,7 +5,7 @@ module OpenAI module Graders type 
score_model_grader = { - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], model: String, name: String, type: :score_model, @@ -14,7 +14,7 @@ module OpenAI } class ScoreModelGrader < OpenAI::Internal::Type::BaseModel - attr_accessor input: ::Array[OpenAI::EvalItem] + attr_accessor input: ::Array[OpenAI::Graders::ScoreModelGrader::Input] attr_accessor model: String @@ -31,13 +31,99 @@ module OpenAI def sampling_params=: (top) -> top def initialize: ( - input: ::Array[OpenAI::EvalItem], + input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], model: String, name: String, ?range: ::Array[Float], ?sampling_params: top, ?type: :score_model ) -> void + + def to_hash: -> { + input: ::Array[OpenAI::Graders::ScoreModelGrader::Input], + model: String, + name: String, + type: :score_model, + range: ::Array[Float], + sampling_params: top + } + + type input = + { + content: OpenAI::Models::Graders::ScoreModelGrader::Input::content, + role: OpenAI::Models::Graders::ScoreModelGrader::Input::role, + type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_ + } + + class Input < OpenAI::Internal::Type::BaseModel + attr_accessor content: OpenAI::Models::Graders::ScoreModelGrader::Input::content + + attr_accessor role: OpenAI::Models::Graders::ScoreModelGrader::Input::role + + attr_reader type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_? 
+ + def type=: ( + OpenAI::Models::Graders::ScoreModelGrader::Input::type_ + ) -> OpenAI::Models::Graders::ScoreModelGrader::Input::type_ + + def initialize: ( + content: OpenAI::Models::Graders::ScoreModelGrader::Input::content, + role: OpenAI::Models::Graders::ScoreModelGrader::Input::role, + ?type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_ + ) -> void + + def to_hash: -> { + content: OpenAI::Models::Graders::ScoreModelGrader::Input::content, + role: OpenAI::Models::Graders::ScoreModelGrader::Input::role, + type: OpenAI::Models::Graders::ScoreModelGrader::Input::type_ + } + + type content = + String + | OpenAI::Responses::ResponseInputText + | OpenAI::Graders::ScoreModelGrader::Input::Content::OutputText + + module Content + extend OpenAI::Internal::Type::Union + + type output_text = { text: String, type: :output_text } + + class OutputText < OpenAI::Internal::Type::BaseModel + attr_accessor text: String + + attr_accessor type: :output_text + + def initialize: (text: String, ?type: :output_text) -> void + + def to_hash: -> { text: String, type: :output_text } + end + + def self?.variants: -> ::Array[OpenAI::Models::Graders::ScoreModelGrader::Input::content] + end + + type role = :user | :assistant | :system | :developer + + module Role + extend OpenAI::Internal::Type::Enum + + USER: :user + ASSISTANT: :assistant + SYSTEM: :system + DEVELOPER: :developer + + def self?.values: -> ::Array[OpenAI::Models::Graders::ScoreModelGrader::Input::role] + end + + type type_ = :message + + module Type + extend OpenAI::Internal::Type::Enum + + MESSAGE: :message + + def self?.values: -> ::Array[OpenAI::Models::Graders::ScoreModelGrader::Input::type_] + end + end end end end diff --git a/sig/openai/models/graders/string_check_grader.rbs b/sig/openai/models/graders/string_check_grader.rbs index 5f60ffb0..039498de 100644 --- a/sig/openai/models/graders/string_check_grader.rbs +++ b/sig/openai/models/graders/string_check_grader.rbs @@ -7,7 +7,7 @@ module OpenAI { 
input: String, name: String, - operation: OpenAI::Graders::StringCheckGrader::operation, + operation: OpenAI::Models::Graders::StringCheckGrader::operation, reference: String, type: :string_check } @@ -17,7 +17,7 @@ module OpenAI attr_accessor name: String - attr_accessor operation: OpenAI::Graders::StringCheckGrader::operation + attr_accessor operation: OpenAI::Models::Graders::StringCheckGrader::operation attr_accessor reference: String @@ -26,11 +26,19 @@ module OpenAI def initialize: ( input: String, name: String, - operation: OpenAI::Graders::StringCheckGrader::operation, + operation: OpenAI::Models::Graders::StringCheckGrader::operation, reference: String, ?type: :string_check ) -> void + def to_hash: -> { + input: String, + name: String, + operation: OpenAI::Models::Graders::StringCheckGrader::operation, + reference: String, + type: :string_check + } + type operation = :eq | :ne | :like | :ilike module Operation @@ -41,7 +49,7 @@ module OpenAI LIKE: :like ILIKE: :ilike - def self?.values: -> ::Array[OpenAI::Graders::StringCheckGrader::operation] + def self?.values: -> ::Array[OpenAI::Models::Graders::StringCheckGrader::operation] end end end diff --git a/sig/openai/models/graders/text_similarity_grader.rbs b/sig/openai/models/graders/text_similarity_grader.rbs index 24453b12..9002b540 100644 --- a/sig/openai/models/graders/text_similarity_grader.rbs +++ b/sig/openai/models/graders/text_similarity_grader.rbs @@ -5,7 +5,7 @@ module OpenAI module Graders type text_similarity_grader = { - evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric, + evaluation_metric: OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric, input: String, name: String, reference: String, @@ -13,7 +13,7 @@ module OpenAI } class TextSimilarityGrader < OpenAI::Internal::Type::BaseModel - attr_accessor evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric + attr_accessor evaluation_metric: 
OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric attr_accessor input: String @@ -24,13 +24,21 @@ module OpenAI attr_accessor type: :text_similarity def initialize: ( - evaluation_metric: OpenAI::Graders::TextSimilarityGrader::evaluation_metric, + evaluation_metric: OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric, input: String, name: String, reference: String, ?type: :text_similarity ) -> void + def to_hash: -> { + evaluation_metric: OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric, + input: String, + name: String, + reference: String, + type: :text_similarity + } + type evaluation_metric = :fuzzy_match | :bleu @@ -57,7 +65,7 @@ module OpenAI ROUGE_5: :rouge_5 ROUGE_L: :rouge_l - def self?.values: -> ::Array[OpenAI::Graders::TextSimilarityGrader::evaluation_metric] + def self?.values: -> ::Array[OpenAI::Models::Graders::TextSimilarityGrader::evaluation_metric] end end end diff --git a/sig/openai/models/image.rbs b/sig/openai/models/image.rbs index 96a4fccc..fd7554b0 100644 --- a/sig/openai/models/image.rbs +++ b/sig/openai/models/image.rbs @@ -20,6 +20,12 @@ module OpenAI ?revised_prompt: String, ?url: String ) -> void + + def to_hash: -> { + :b64_json => String, + revised_prompt: String, + url: String + } end end end diff --git a/sig/openai/models/image_create_variation_params.rbs b/sig/openai/models/image_create_variation_params.rbs index e47b6178..47601c14 100644 --- a/sig/openai/models/image_create_variation_params.rbs +++ b/sig/openai/models/image_create_variation_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type image_create_variation_params = { - image: (Pathname | StringIO | IO | OpenAI::FilePart), + image: OpenAI::Internal::file_input, model: OpenAI::Models::ImageCreateVariationParams::model?, n: Integer?, response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -15,7 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include 
OpenAI::Internal::Type::RequestParameters - attr_accessor image: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor image: OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::ImageCreateVariationParams::model? @@ -30,7 +30,7 @@ module OpenAI def user=: (String) -> String def initialize: ( - image: Pathname | StringIO | IO | OpenAI::FilePart, + image: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -39,6 +39,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + image: OpenAI::Internal::file_input, + model: OpenAI::Models::ImageCreateVariationParams::model?, + n: Integer?, + response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, + size: OpenAI::Models::ImageCreateVariationParams::size?, + user: String, + request_options: OpenAI::RequestOptions + } + type model = String | OpenAI::Models::image_model module Model diff --git a/sig/openai/models/image_edit_params.rbs b/sig/openai/models/image_edit_params.rbs index 75a3c9e4..0fe6ec3d 100644 --- a/sig/openai/models/image_edit_params.rbs +++ b/sig/openai/models/image_edit_params.rbs @@ -5,7 +5,7 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, background: OpenAI::Models::ImageEditParams::background?, - mask: (Pathname | StringIO | IO | OpenAI::FilePart), + mask: OpenAI::Internal::file_input, model: OpenAI::Models::ImageEditParams::model?, n: Integer?, quality: OpenAI::Models::ImageEditParams::quality?, @@ -25,11 +25,9 @@ module OpenAI attr_accessor background: OpenAI::Models::ImageEditParams::background? - attr_reader mask: (Pathname | StringIO | IO | OpenAI::FilePart)? + attr_reader mask: OpenAI::Internal::file_input? 
- def mask=: ( - Pathname | StringIO | IO | OpenAI::FilePart - ) -> (Pathname | StringIO | IO | OpenAI::FilePart) + def mask=: (OpenAI::Internal::file_input) -> OpenAI::Internal::file_input attr_accessor model: OpenAI::Models::ImageEditParams::model? @@ -49,7 +47,7 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, ?background: OpenAI::Models::ImageEditParams::background?, - ?mask: Pathname | StringIO | IO | OpenAI::FilePart, + ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, @@ -59,12 +57,22 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + image: OpenAI::Models::ImageEditParams::image, + prompt: String, + background: OpenAI::Models::ImageEditParams::background?, + mask: OpenAI::Internal::file_input, + model: OpenAI::Models::ImageEditParams::model?, + n: Integer?, + quality: OpenAI::Models::ImageEditParams::quality?, + response_format: OpenAI::Models::ImageEditParams::response_format?, + size: OpenAI::Models::ImageEditParams::size?, + user: String, + request_options: OpenAI::RequestOptions + } + type image = - Pathname - | StringIO - | IO - | OpenAI::FilePart - | ::Array[Pathname | StringIO | IO | OpenAI::FilePart] + OpenAI::Internal::file_input | ::Array[OpenAI::Internal::file_input] module Image extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/image_generate_params.rbs b/sig/openai/models/image_generate_params.rbs index d6367505..e9b0e2e0 100644 --- a/sig/openai/models/image_generate_params.rbs +++ b/sig/openai/models/image_generate_params.rbs @@ -63,6 +63,22 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + prompt: String, + background: OpenAI::Models::ImageGenerateParams::background?, + model: OpenAI::Models::ImageGenerateParams::model?, + moderation: OpenAI::Models::ImageGenerateParams::moderation?, + n: Integer?, + output_compression: 
Integer?, + output_format: OpenAI::Models::ImageGenerateParams::output_format?, + quality: OpenAI::Models::ImageGenerateParams::quality?, + response_format: OpenAI::Models::ImageGenerateParams::response_format?, + size: OpenAI::Models::ImageGenerateParams::size?, + style: OpenAI::Models::ImageGenerateParams::style?, + user: String, + request_options: OpenAI::RequestOptions + } + type background = :transparent | :opaque | :auto module Background diff --git a/sig/openai/models/images_response.rbs b/sig/openai/models/images_response.rbs index da6579dc..fd899bb5 100644 --- a/sig/openai/models/images_response.rbs +++ b/sig/openai/models/images_response.rbs @@ -26,6 +26,12 @@ module OpenAI ?usage: OpenAI::ImagesResponse::Usage ) -> void + def to_hash: -> { + created: Integer, + data: ::Array[OpenAI::Image], + usage: OpenAI::ImagesResponse::Usage + } + type usage = { input_tokens: Integer, @@ -50,6 +56,13 @@ module OpenAI total_tokens: Integer ) -> void + def to_hash: -> { + input_tokens: Integer, + input_tokens_details: OpenAI::ImagesResponse::Usage::InputTokensDetails, + output_tokens: Integer, + total_tokens: Integer + } + type input_tokens_details = { image_tokens: Integer, text_tokens: Integer } @@ -59,6 +72,8 @@ module OpenAI attr_accessor text_tokens: Integer def initialize: (image_tokens: Integer, text_tokens: Integer) -> void + + def to_hash: -> { image_tokens: Integer, text_tokens: Integer } end end end diff --git a/sig/openai/models/model.rbs b/sig/openai/models/model.rbs index 79402173..f6d604c9 100644 --- a/sig/openai/models/model.rbs +++ b/sig/openai/models/model.rbs @@ -18,6 +18,13 @@ module OpenAI owned_by: String, ?object: :model ) -> void + + def to_hash: -> { + id: String, + created: Integer, + object: :model, + owned_by: String + } end end end diff --git a/sig/openai/models/model_delete_params.rbs b/sig/openai/models/model_delete_params.rbs index 16285da7..68406d8c 100644 --- a/sig/openai/models/model_delete_params.rbs +++ 
b/sig/openai/models/model_delete_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/model_deleted.rbs b/sig/openai/models/model_deleted.rbs index b95b2a67..2676f627 100644 --- a/sig/openai/models/model_deleted.rbs +++ b/sig/openai/models/model_deleted.rbs @@ -10,6 +10,8 @@ module OpenAI attr_accessor object: String def initialize: (id: String, deleted: bool, object: String) -> void + + def to_hash: -> { id: String, deleted: bool, object: String } end end end diff --git a/sig/openai/models/model_list_params.rbs b/sig/openai/models/model_list_params.rbs index 37d678e8..915bdcf6 100644 --- a/sig/openai/models/model_list_params.rbs +++ b/sig/openai/models/model_list_params.rbs @@ -7,6 +7,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/model_retrieve_params.rbs b/sig/openai/models/model_retrieve_params.rbs index fed08da1..c764c41a 100644 --- a/sig/openai/models/model_retrieve_params.rbs +++ b/sig/openai/models/model_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/moderation.rbs b/sig/openai/models/moderation.rbs index 7d543126..379bbdc3 100644 --- a/sig/openai/models/moderation.rbs +++ b/sig/openai/models/moderation.rbs @@ -24,6 +24,13 @@ module OpenAI flagged: bool ) -> void + def to_hash: -> { + categories: OpenAI::Moderation::Categories, + category_applied_input_types: OpenAI::Moderation::CategoryAppliedInputTypes, + category_scores: 
OpenAI::Moderation::CategoryScores, + flagged: bool + } + type categories = { harassment: bool, @@ -83,68 +90,100 @@ module OpenAI violence: bool, violence_graphic: bool ) -> void + + def to_hash: -> { + harassment: bool, + harassment_threatening: bool, + hate: bool, + hate_threatening: bool, + illicit: bool?, + illicit_violent: bool?, + self_harm: bool, + self_harm_instructions: bool, + self_harm_intent: bool, + sexual: bool, + sexual_minors: bool, + violence: bool, + violence_graphic: bool + } end type category_applied_input_types = { - harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment], - harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening], - hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate], - hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening], - illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit], - illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent], - self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm], - self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction], - self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent], - sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual], - sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor], - violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence], - violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] + harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment], + harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening], + hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate], + hate_threatening: 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening], + illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit], + illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent], + self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm], + self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction], + self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent], + sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual], + sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor], + violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence], + violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] } class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel - attr_accessor harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment] + attr_accessor harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] - attr_accessor harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening] + attr_accessor harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] - attr_accessor hate: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate] + attr_accessor hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate] - attr_accessor hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening] + attr_accessor hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening] - attr_accessor illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit] + attr_accessor illicit: 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit] - attr_accessor illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent] + attr_accessor illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent] - attr_accessor self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm] + attr_accessor self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm] - attr_accessor self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction] + attr_accessor self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction] - attr_accessor self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent] + attr_accessor self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent] - attr_accessor sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual] + attr_accessor sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual] - attr_accessor sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor] + attr_accessor sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor] - attr_accessor violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence] + attr_accessor violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence] - attr_accessor violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] + attr_accessor violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] def initialize: ( - harassment: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment], - harassment_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening], - hate: 
::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate], - hate_threatening: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening], - illicit: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit], - illicit_violent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent], - self_harm: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm], - self_harm_instructions: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction], - self_harm_intent: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent], - sexual: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual], - sexual_minors: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor], - violence: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence], - violence_graphic: ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] + harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment], + harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening], + hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate], + hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening], + illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit], + illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent], + self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm], + self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction], + self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent], + sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual], + sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor], + violence: 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence], + violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] ) -> void + def to_hash: -> { + harassment: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment], + harassment_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening], + hate: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate], + hate_threatening: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening], + illicit: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit], + illicit_violent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent], + self_harm: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm], + self_harm_instructions: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction], + self_harm_intent: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent], + sexual: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual], + sexual_minors: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor], + violence: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence], + violence_graphic: ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] + } + type harassment = :text module Harassment @@ -152,7 +191,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment] end type harassment_threatening = :text @@ -162,7 +201,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::harassment_threatening] + def self?.values: -> 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::harassment_threatening] end type hate = :text @@ -172,7 +211,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate] end type hate_threatening = :text @@ -182,7 +221,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::hate_threatening] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::hate_threatening] end type illicit = :text @@ -192,7 +231,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit] end type illicit_violent = :text @@ -202,7 +241,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::illicit_violent] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::illicit_violent] end type self_harm = :text | :image @@ -213,7 +252,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm] end type self_harm_instruction = :text | :image @@ -224,7 +263,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_instruction] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_instruction] end type self_harm_intent = :text | :image @@ -235,7 +274,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::self_harm_intent] + def self?.values: -> 
::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::self_harm_intent] end type sexual = :text | :image @@ -246,7 +285,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual] end type sexual_minor = :text @@ -256,7 +295,7 @@ module OpenAI TEXT: :text - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::sexual_minor] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::sexual_minor] end type violence = :text | :image @@ -267,7 +306,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence] end type violence_graphic = :text | :image @@ -278,7 +317,7 @@ module OpenAI TEXT: :text IMAGE: :image - def self?.values: -> ::Array[OpenAI::Moderation::CategoryAppliedInputTypes::violence_graphic] + def self?.values: -> ::Array[OpenAI::Models::Moderation::CategoryAppliedInputTypes::violence_graphic] end end @@ -341,6 +380,22 @@ module OpenAI violence: Float, violence_graphic: Float ) -> void + + def to_hash: -> { + harassment: Float, + harassment_threatening: Float, + hate: Float, + hate_threatening: Float, + illicit: Float, + illicit_violent: Float, + self_harm: Float, + self_harm_instructions: Float, + self_harm_intent: Float, + sexual: Float, + sexual_minors: Float, + violence: Float, + violence_graphic: Float + } end end end diff --git a/sig/openai/models/moderation_create_params.rbs b/sig/openai/models/moderation_create_params.rbs index f681a3bb..832d4052 100644 --- a/sig/openai/models/moderation_create_params.rbs +++ b/sig/openai/models/moderation_create_params.rbs @@ -25,6 +25,12 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + input: 
OpenAI::Models::ModerationCreateParams::input, + model: OpenAI::Models::ModerationCreateParams::model, + request_options: OpenAI::RequestOptions + } + type input = String | ::Array[String] diff --git a/sig/openai/models/moderation_create_response.rbs b/sig/openai/models/moderation_create_response.rbs index 616ad949..6f9757a1 100644 --- a/sig/openai/models/moderation_create_response.rbs +++ b/sig/openai/models/moderation_create_response.rbs @@ -15,6 +15,12 @@ module OpenAI model: String, results: ::Array[OpenAI::Moderation] ) -> void + + def to_hash: -> { + id: String, + model: String, + results: ::Array[OpenAI::Moderation] + } end end end diff --git a/sig/openai/models/moderation_image_url_input.rbs b/sig/openai/models/moderation_image_url_input.rbs index e099ab00..d373c517 100644 --- a/sig/openai/models/moderation_image_url_input.rbs +++ b/sig/openai/models/moderation_image_url_input.rbs @@ -13,12 +13,19 @@ module OpenAI ?type: :image_url ) -> void + def to_hash: -> { + image_url: OpenAI::ModerationImageURLInput::ImageURL, + type: :image_url + } + type image_url = { url: String } class ImageURL < OpenAI::Internal::Type::BaseModel attr_accessor url: String def initialize: (url: String) -> void + + def to_hash: -> { url: String } end end end diff --git a/sig/openai/models/moderation_text_input.rbs b/sig/openai/models/moderation_text_input.rbs index 6f258e86..fea26990 100644 --- a/sig/openai/models/moderation_text_input.rbs +++ b/sig/openai/models/moderation_text_input.rbs @@ -8,6 +8,8 @@ module OpenAI attr_accessor type: :text def initialize: (text: String, ?type: :text) -> void + + def to_hash: -> { text: String, type: :text } end end end diff --git a/sig/openai/models/other_file_chunking_strategy_object.rbs b/sig/openai/models/other_file_chunking_strategy_object.rbs index 90cc48b4..10298f79 100644 --- a/sig/openai/models/other_file_chunking_strategy_object.rbs +++ b/sig/openai/models/other_file_chunking_strategy_object.rbs @@ -6,6 +6,8 @@ module OpenAI 
attr_accessor type: :other def initialize: (?type: :other) -> void + + def to_hash: -> { type: :other } end end end diff --git a/sig/openai/models/reasoning.rbs b/sig/openai/models/reasoning.rbs index 9cfe03fc..7a7e744d 100644 --- a/sig/openai/models/reasoning.rbs +++ b/sig/openai/models/reasoning.rbs @@ -3,23 +3,29 @@ module OpenAI type reasoning = { effort: OpenAI::Models::reasoning_effort?, - generate_summary: OpenAI::Reasoning::generate_summary?, - summary: OpenAI::Reasoning::summary? + generate_summary: OpenAI::Models::Reasoning::generate_summary?, + summary: OpenAI::Models::Reasoning::summary? } class Reasoning < OpenAI::Internal::Type::BaseModel attr_accessor effort: OpenAI::Models::reasoning_effort? - attr_accessor generate_summary: OpenAI::Reasoning::generate_summary? + attr_accessor generate_summary: OpenAI::Models::Reasoning::generate_summary? - attr_accessor summary: OpenAI::Reasoning::summary? + attr_accessor summary: OpenAI::Models::Reasoning::summary? def initialize: ( ?effort: OpenAI::Models::reasoning_effort?, - ?generate_summary: OpenAI::Reasoning::generate_summary?, - ?summary: OpenAI::Reasoning::summary? + ?generate_summary: OpenAI::Models::Reasoning::generate_summary?, + ?summary: OpenAI::Models::Reasoning::summary? ) -> void + def to_hash: -> { + effort: OpenAI::Models::reasoning_effort?, + generate_summary: OpenAI::Models::Reasoning::generate_summary?, + summary: OpenAI::Models::Reasoning::summary? 
+ } + type generate_summary = :auto | :concise | :detailed module GenerateSummary @@ -29,7 +35,7 @@ module OpenAI CONCISE: :concise DETAILED: :detailed - def self?.values: -> ::Array[OpenAI::Reasoning::generate_summary] + def self?.values: -> ::Array[OpenAI::Models::Reasoning::generate_summary] end type summary = :auto | :concise | :detailed @@ -41,7 +47,7 @@ module OpenAI CONCISE: :concise DETAILED: :detailed - def self?.values: -> ::Array[OpenAI::Reasoning::summary] + def self?.values: -> ::Array[OpenAI::Models::Reasoning::summary] end end end diff --git a/sig/openai/models/response_format_json_object.rbs b/sig/openai/models/response_format_json_object.rbs index b34ea82e..f8141178 100644 --- a/sig/openai/models/response_format_json_object.rbs +++ b/sig/openai/models/response_format_json_object.rbs @@ -6,6 +6,8 @@ module OpenAI attr_accessor type: :json_object def initialize: (?type: :json_object) -> void + + def to_hash: -> { type: :json_object } end end end diff --git a/sig/openai/models/response_format_json_schema.rbs b/sig/openai/models/response_format_json_schema.rbs index 6f200e90..e806b510 100644 --- a/sig/openai/models/response_format_json_schema.rbs +++ b/sig/openai/models/response_format_json_schema.rbs @@ -16,6 +16,11 @@ module OpenAI ?type: :json_schema ) -> void + def to_hash: -> { + json_schema: OpenAI::ResponseFormatJSONSchema::JSONSchema, + type: :json_schema + } + type json_schema = { name: String, @@ -43,6 +48,13 @@ module OpenAI ?schema: ::Hash[Symbol, top], ?strict: bool? ) -> void + + def to_hash: -> { + name: String, + description: String, + schema: ::Hash[Symbol, top], + strict: bool? 
+ } end end end diff --git a/sig/openai/models/response_format_text.rbs b/sig/openai/models/response_format_text.rbs index b245c218..305c9a4d 100644 --- a/sig/openai/models/response_format_text.rbs +++ b/sig/openai/models/response_format_text.rbs @@ -6,6 +6,8 @@ module OpenAI attr_accessor type: :text def initialize: (?type: :text) -> void + + def to_hash: -> { type: :text } end end end diff --git a/sig/openai/models/responses/computer_tool.rbs b/sig/openai/models/responses/computer_tool.rbs index 23544e1a..a2e42564 100644 --- a/sig/openai/models/responses/computer_tool.rbs +++ b/sig/openai/models/responses/computer_tool.rbs @@ -5,7 +5,7 @@ module OpenAI { display_height: Integer, display_width: Integer, - environment: OpenAI::Responses::ComputerTool::environment, + environment: OpenAI::Models::Responses::ComputerTool::environment, type: :computer_use_preview } @@ -14,17 +14,24 @@ module OpenAI attr_accessor display_width: Integer - attr_accessor environment: OpenAI::Responses::ComputerTool::environment + attr_accessor environment: OpenAI::Models::Responses::ComputerTool::environment attr_accessor type: :computer_use_preview def initialize: ( display_height: Integer, display_width: Integer, - environment: OpenAI::Responses::ComputerTool::environment, + environment: OpenAI::Models::Responses::ComputerTool::environment, ?type: :computer_use_preview ) -> void + def to_hash: -> { + display_height: Integer, + display_width: Integer, + environment: OpenAI::Models::Responses::ComputerTool::environment, + type: :computer_use_preview + } + type environment = :windows | :mac | :linux | :ubuntu | :browser module Environment @@ -36,7 +43,7 @@ module OpenAI UBUNTU: :ubuntu BROWSER: :browser - def self?.values: -> ::Array[OpenAI::Responses::ComputerTool::environment] + def self?.values: -> ::Array[OpenAI::Models::Responses::ComputerTool::environment] end end end diff --git a/sig/openai/models/responses/easy_input_message.rbs b/sig/openai/models/responses/easy_input_message.rbs 
index 0b52f94f..eb94d8c1 100644 --- a/sig/openai/models/responses/easy_input_message.rbs +++ b/sig/openai/models/responses/easy_input_message.rbs @@ -3,28 +3,34 @@ module OpenAI module Responses type easy_input_message = { - content: OpenAI::Responses::EasyInputMessage::content, - role: OpenAI::Responses::EasyInputMessage::role, - type: OpenAI::Responses::EasyInputMessage::type_ + content: OpenAI::Models::Responses::EasyInputMessage::content, + role: OpenAI::Models::Responses::EasyInputMessage::role, + type: OpenAI::Models::Responses::EasyInputMessage::type_ } class EasyInputMessage < OpenAI::Internal::Type::BaseModel - attr_accessor content: OpenAI::Responses::EasyInputMessage::content + attr_accessor content: OpenAI::Models::Responses::EasyInputMessage::content - attr_accessor role: OpenAI::Responses::EasyInputMessage::role + attr_accessor role: OpenAI::Models::Responses::EasyInputMessage::role - attr_reader type: OpenAI::Responses::EasyInputMessage::type_? + attr_reader type: OpenAI::Models::Responses::EasyInputMessage::type_? 
def type=: ( - OpenAI::Responses::EasyInputMessage::type_ - ) -> OpenAI::Responses::EasyInputMessage::type_ + OpenAI::Models::Responses::EasyInputMessage::type_ + ) -> OpenAI::Models::Responses::EasyInputMessage::type_ def initialize: ( - content: OpenAI::Responses::EasyInputMessage::content, - role: OpenAI::Responses::EasyInputMessage::role, - ?type: OpenAI::Responses::EasyInputMessage::type_ + content: OpenAI::Models::Responses::EasyInputMessage::content, + role: OpenAI::Models::Responses::EasyInputMessage::role, + ?type: OpenAI::Models::Responses::EasyInputMessage::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Responses::EasyInputMessage::content, + role: OpenAI::Models::Responses::EasyInputMessage::role, + type: OpenAI::Models::Responses::EasyInputMessage::type_ + } + type content = String | OpenAI::Models::Responses::response_input_message_content_list @@ -32,7 +38,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::EasyInputMessage::content] + def self?.variants: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::content] end type role = :user | :assistant | :system | :developer @@ -45,7 +51,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Responses::EasyInputMessage::role] + def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::role] end type type_ = :message @@ -55,7 +61,7 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Responses::EasyInputMessage::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::EasyInputMessage::type_] end end end diff --git a/sig/openai/models/responses/file_search_tool.rbs b/sig/openai/models/responses/file_search_tool.rbs index da6a8189..cc1a7d01 100644 --- a/sig/openai/models/responses/file_search_tool.rbs +++ b/sig/openai/models/responses/file_search_tool.rbs @@ -5,7 +5,7 @@ module OpenAI { type: :file_search, vector_store_ids: 
::Array[String], - filters: OpenAI::Responses::FileSearchTool::filters?, + filters: OpenAI::Models::Responses::FileSearchTool::filters?, max_num_results: Integer, ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions } @@ -15,7 +15,7 @@ module OpenAI attr_accessor vector_store_ids: ::Array[String] - attr_accessor filters: OpenAI::Responses::FileSearchTool::filters? + attr_accessor filters: OpenAI::Models::Responses::FileSearchTool::filters? attr_reader max_num_results: Integer? @@ -29,42 +29,55 @@ module OpenAI def initialize: ( vector_store_ids: ::Array[String], - ?filters: OpenAI::Responses::FileSearchTool::filters?, + ?filters: OpenAI::Models::Responses::FileSearchTool::filters?, ?max_num_results: Integer, ?ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions, ?type: :file_search ) -> void + def to_hash: -> { + type: :file_search, + vector_store_ids: ::Array[String], + filters: OpenAI::Models::Responses::FileSearchTool::filters?, + max_num_results: Integer, + ranking_options: OpenAI::Responses::FileSearchTool::RankingOptions + } + type filters = OpenAI::ComparisonFilter | OpenAI::CompoundFilter module Filters extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::FileSearchTool::filters] + def self?.variants: -> ::Array[OpenAI::Models::Responses::FileSearchTool::filters] end type ranking_options = { - ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker, + ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_reader ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker? + attr_reader ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker? 
def ranker=: ( - OpenAI::Responses::FileSearchTool::RankingOptions::ranker - ) -> OpenAI::Responses::FileSearchTool::RankingOptions::ranker + OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker + ) -> OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker attr_reader score_threshold: Float? def score_threshold=: (Float) -> Float def initialize: ( - ?ranker: OpenAI::Responses::FileSearchTool::RankingOptions::ranker, + ?ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, ?score_threshold: Float ) -> void + def to_hash: -> { + ranker: OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker, + score_threshold: Float + } + type ranker = :auto | :"default-2024-11-15" module Ranker @@ -73,7 +86,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" - def self?.values: -> ::Array[OpenAI::Responses::FileSearchTool::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::Responses::FileSearchTool::RankingOptions::ranker] end end end diff --git a/sig/openai/models/responses/function_tool.rbs b/sig/openai/models/responses/function_tool.rbs index eb7df928..df249b37 100644 --- a/sig/openai/models/responses/function_tool.rbs +++ b/sig/openai/models/responses/function_tool.rbs @@ -28,6 +28,14 @@ module OpenAI ?description: String?, ?type: :function ) -> void + + def to_hash: -> { + name: String, + parameters: ::Hash[Symbol, top]?, + strict: bool?, + type: :function, + description: String? 
+ } end end end diff --git a/sig/openai/models/responses/input_item_list_params.rbs b/sig/openai/models/responses/input_item_list_params.rbs index 0897b4e9..4760e97a 100644 --- a/sig/openai/models/responses/input_item_list_params.rbs +++ b/sig/openai/models/responses/input_item_list_params.rbs @@ -48,6 +48,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + include: ::Array[OpenAI::Models::Responses::response_includable], + limit: Integer, + order: OpenAI::Models::Responses::InputItemListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/responses/response.rbs b/sig/openai/models/responses/response.rbs index ea887796..d5658b8f 100644 --- a/sig/openai/models/responses/response.rbs +++ b/sig/openai/models/responses/response.rbs @@ -14,16 +14,17 @@ module OpenAI output: ::Array[OpenAI::Models::Responses::response_output_item], parallel_tool_calls: bool, temperature: Float?, - tool_choice: OpenAI::Responses::Response::tool_choice, + tool_choice: OpenAI::Models::Responses::Response::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, + background: bool?, max_output_tokens: Integer?, previous_response_id: String?, reasoning: OpenAI::Reasoning?, - service_tier: OpenAI::Responses::Response::service_tier?, + service_tier: OpenAI::Models::Responses::Response::service_tier?, status: OpenAI::Models::Responses::response_status, text: OpenAI::Responses::ResponseTextConfig, - truncation: OpenAI::Responses::Response::truncation?, + truncation: OpenAI::Models::Responses::Response::truncation?, usage: OpenAI::Responses::ResponseUsage, user: String } @@ -51,19 +52,21 @@ module OpenAI attr_accessor temperature: Float? 
- attr_accessor tool_choice: OpenAI::Responses::Response::tool_choice + attr_accessor tool_choice: OpenAI::Models::Responses::Response::tool_choice attr_accessor tools: ::Array[OpenAI::Models::Responses::tool] attr_accessor top_p: Float? + attr_accessor background: bool? + attr_accessor max_output_tokens: Integer? attr_accessor previous_response_id: String? attr_accessor reasoning: OpenAI::Reasoning? - attr_accessor service_tier: OpenAI::Responses::Response::service_tier? + attr_accessor service_tier: OpenAI::Models::Responses::Response::service_tier? attr_reader status: OpenAI::Models::Responses::response_status? @@ -77,7 +80,7 @@ module OpenAI OpenAI::Responses::ResponseTextConfig ) -> OpenAI::Responses::ResponseTextConfig - attr_accessor truncation: OpenAI::Responses::Response::truncation? + attr_accessor truncation: OpenAI::Models::Responses::Response::truncation? attr_reader usage: OpenAI::Responses::ResponseUsage? @@ -100,35 +103,69 @@ module OpenAI output: ::Array[OpenAI::Models::Responses::response_output_item], parallel_tool_calls: bool, temperature: Float?, - tool_choice: OpenAI::Responses::Response::tool_choice, + tool_choice: OpenAI::Models::Responses::Response::tool_choice, tools: ::Array[OpenAI::Models::Responses::tool], top_p: Float?, + ?background: bool?, ?max_output_tokens: Integer?, ?previous_response_id: String?, ?reasoning: OpenAI::Reasoning?, - ?service_tier: OpenAI::Responses::Response::service_tier?, + ?service_tier: OpenAI::Models::Responses::Response::service_tier?, ?status: OpenAI::Models::Responses::response_status, ?text: OpenAI::Responses::ResponseTextConfig, - ?truncation: OpenAI::Responses::Response::truncation?, + ?truncation: OpenAI::Models::Responses::Response::truncation?, ?usage: OpenAI::Responses::ResponseUsage, ?user: String, ?object: :response ) -> void + def to_hash: -> { + id: String, + created_at: Float, + error: OpenAI::Responses::ResponseError?, + incomplete_details: OpenAI::Responses::Response::IncompleteDetails?, + 
instructions: String?, + metadata: OpenAI::Models::metadata?, + model: OpenAI::Models::responses_model, + object: :response, + output: ::Array[OpenAI::Models::Responses::response_output_item], + parallel_tool_calls: bool, + temperature: Float?, + tool_choice: OpenAI::Models::Responses::Response::tool_choice, + tools: ::Array[OpenAI::Models::Responses::tool], + top_p: Float?, + background: bool?, + max_output_tokens: Integer?, + previous_response_id: String?, + reasoning: OpenAI::Reasoning?, + service_tier: OpenAI::Models::Responses::Response::service_tier?, + status: OpenAI::Models::Responses::response_status, + text: OpenAI::Responses::ResponseTextConfig, + truncation: OpenAI::Models::Responses::Response::truncation?, + usage: OpenAI::Responses::ResponseUsage, + user: String + } + type incomplete_details = - { reason: OpenAI::Responses::Response::IncompleteDetails::reason } + { + reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason + } class IncompleteDetails < OpenAI::Internal::Type::BaseModel - attr_reader reason: OpenAI::Responses::Response::IncompleteDetails::reason? + attr_reader reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason? 
def reason=: ( - OpenAI::Responses::Response::IncompleteDetails::reason - ) -> OpenAI::Responses::Response::IncompleteDetails::reason + OpenAI::Models::Responses::Response::IncompleteDetails::reason + ) -> OpenAI::Models::Responses::Response::IncompleteDetails::reason def initialize: ( - ?reason: OpenAI::Responses::Response::IncompleteDetails::reason + ?reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason ) -> void + def to_hash: -> { + reason: OpenAI::Models::Responses::Response::IncompleteDetails::reason + } + type reason = :max_output_tokens | :content_filter module Reason @@ -137,7 +174,7 @@ module OpenAI MAX_OUTPUT_TOKENS: :max_output_tokens CONTENT_FILTER: :content_filter - def self?.values: -> ::Array[OpenAI::Responses::Response::IncompleteDetails::reason] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::IncompleteDetails::reason] end end @@ -149,7 +186,7 @@ module OpenAI module ToolChoice extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::Response::tool_choice] + def self?.variants: -> ::Array[OpenAI::Models::Responses::Response::tool_choice] end type service_tier = :auto | :default | :flex @@ -161,7 +198,7 @@ module OpenAI DEFAULT: :default FLEX: :flex - def self?.values: -> ::Array[OpenAI::Responses::Response::service_tier] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::service_tier] end type truncation = :auto | :disabled @@ -172,7 +209,7 @@ module OpenAI AUTO: :auto DISABLED: :disabled - def self?.values: -> ::Array[OpenAI::Responses::Response::truncation] + def self?.values: -> ::Array[OpenAI::Models::Responses::Response::truncation] end end end diff --git a/sig/openai/models/responses/response_audio_delta_event.rbs b/sig/openai/models/responses/response_audio_delta_event.rbs index 370be606..f0bcab36 100644 --- a/sig/openai/models/responses/response_audio_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_delta_event.rbs @@ -2,14 +2,30 @@ 
module OpenAI module Models module Responses type response_audio_delta_event = - { delta: String, type: :"response.audio.delta" } + { + delta: String, + sequence_number: Integer, + type: :"response.audio.delta" + } class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String + attr_accessor sequence_number: Integer + attr_accessor type: :"response.audio.delta" - def initialize: (delta: String, ?type: :"response.audio.delta") -> void + def initialize: ( + delta: String, + sequence_number: Integer, + ?type: :"response.audio.delta" + ) -> void + + def to_hash: -> { + delta: String, + sequence_number: Integer, + type: :"response.audio.delta" + } end end end diff --git a/sig/openai/models/responses/response_audio_done_event.rbs b/sig/openai/models/responses/response_audio_done_event.rbs index 9399bfae..1c891acf 100644 --- a/sig/openai/models/responses/response_audio_done_event.rbs +++ b/sig/openai/models/responses/response_audio_done_event.rbs @@ -1,12 +1,23 @@ module OpenAI module Models module Responses - type response_audio_done_event = { type: :"response.audio.done" } + type response_audio_done_event = + { sequence_number: Integer, type: :"response.audio.done" } class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.audio.done" - def initialize: (?type: :"response.audio.done") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.audio.done" + ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.audio.done" + } end end end diff --git a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs index ef7c93c3..57c45293 100644 --- a/sig/openai/models/responses/response_audio_transcript_delta_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_delta_event.rbs @@ -2,17 +2,30 @@ module OpenAI module Models 
module Responses type response_audio_transcript_delta_event = - { delta: String, type: :"response.audio.transcript.delta" } + { + delta: String, + sequence_number: Integer, + type: :"response.audio.transcript.delta" + } class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel attr_accessor delta: String + attr_accessor sequence_number: Integer + attr_accessor type: :"response.audio.transcript.delta" def initialize: ( delta: String, + sequence_number: Integer, ?type: :"response.audio.transcript.delta" ) -> void + + def to_hash: -> { + delta: String, + sequence_number: Integer, + type: :"response.audio.transcript.delta" + } end end end diff --git a/sig/openai/models/responses/response_audio_transcript_done_event.rbs b/sig/openai/models/responses/response_audio_transcript_done_event.rbs index 7397feff..7bd59ad5 100644 --- a/sig/openai/models/responses/response_audio_transcript_done_event.rbs +++ b/sig/openai/models/responses/response_audio_transcript_done_event.rbs @@ -2,12 +2,22 @@ module OpenAI module Models module Responses type response_audio_transcript_done_event = - { type: :"response.audio.transcript.done" } + { sequence_number: Integer, type: :"response.audio.transcript.done" } class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + attr_accessor type: :"response.audio.transcript.done" - def initialize: (?type: :"response.audio.transcript.done") -> void + def initialize: ( + sequence_number: Integer, + ?type: :"response.audio.transcript.done" + ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.audio.transcript.done" + } end end end diff --git a/sig/openai/models/responses/response_cancel_params.rbs b/sig/openai/models/responses/response_cancel_params.rbs new file mode 100644 index 00000000..6f0df4a4 --- /dev/null +++ b/sig/openai/models/responses/response_cancel_params.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Models + module Responses + type 
response_cancel_params = + { } & OpenAI::Internal::Type::request_parameters + + class ResponseCancelParams < OpenAI::Internal::Type::BaseModel + extend OpenAI::Internal::Type::RequestParameters::Converter + include OpenAI::Internal::Type::RequestParameters + + def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } + end + end + end +end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs index 212d9bfa..2da40939 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_delta_event.rbs @@ -5,6 +5,7 @@ module OpenAI { delta: String, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.code.delta" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.code.delta" def initialize: ( delta: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.code.delta" ) -> void + + def to_hash: -> { + delta: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.code.delta" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs index 72f8bc5d..5f796490 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_code_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code: String, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.code.done" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor 
output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.code.done" def initialize: ( code: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.code.done" ) -> void + + def to_hash: -> { + code: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.code.done" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs index d2d8e451..d7833fb2 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_completed_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.completed" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.completed" def initialize: ( code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.completed" ) -> void + + def to_hash: -> { + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs index d4d09537..5efe8048 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs +++ 
b/sig/openai/models/responses/response_code_interpreter_call_in_progress_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.in_progress" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.in_progress" def initialize: ( code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.in_progress" ) -> void + + def to_hash: -> { + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs index 8fd6cac0..ccdd6dae 100644 --- a/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs +++ b/sig/openai/models/responses/response_code_interpreter_call_interpreting_event.rbs @@ -5,6 +5,7 @@ module OpenAI { code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, type: :"response.code_interpreter_call.interpreting" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.code_interpreter_call.interpreting" def initialize: ( code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, output_index: Integer, + sequence_number: Integer, ?type: :"response.code_interpreter_call.interpreting" ) -> void + + def to_hash: -> { + code_interpreter_call: OpenAI::Responses::ResponseCodeInterpreterToolCall, + 
output_index: Integer, + sequence_number: Integer, + type: :"response.code_interpreter_call.interpreting" + } end end end diff --git a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs index c892569f..cfca1f50 100644 --- a/sig/openai/models/responses/response_code_interpreter_tool_call.rbs +++ b/sig/openai/models/responses/response_code_interpreter_tool_call.rbs @@ -5,9 +5,10 @@ module OpenAI { id: String, code: String, - results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], - status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, - type: :code_interpreter_call + results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, + type: :code_interpreter_call, + container_id: String } class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel @@ -15,20 +16,34 @@ module OpenAI attr_accessor code: String - attr_accessor results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result] + attr_accessor results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result] - attr_accessor status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status attr_accessor type: :code_interpreter_call + attr_reader container_id: String? 
+ + def container_id=: (String) -> String + def initialize: ( id: String, code: String, - results: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result], - status: OpenAI::Responses::ResponseCodeInterpreterToolCall::status, + results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, + ?container_id: String, ?type: :code_interpreter_call ) -> void + def to_hash: -> { + id: String, + code: String, + results: ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result], + status: OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status, + type: :code_interpreter_call, + container_id: String + } + type result = OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Logs | OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files @@ -44,6 +59,8 @@ module OpenAI attr_accessor type: :logs def initialize: (logs: String, ?type: :logs) -> void + + def to_hash: -> { logs: String, type: :logs } end type files = @@ -62,6 +79,11 @@ module OpenAI ?type: :files ) -> void + def to_hash: -> { + files: ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::Result::Files::File], + type: :files + } + type file = { file_id: String, mime_type: String } class File < OpenAI::Internal::Type::BaseModel @@ -70,10 +92,12 @@ module OpenAI attr_accessor mime_type: String def initialize: (file_id: String, mime_type: String) -> void + + def to_hash: -> { file_id: String, mime_type: String } end end - def self?.variants: -> ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::result] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::result] end type status = :in_progress | :interpreting | :completed @@ -85,7 +109,7 @@ module OpenAI INTERPRETING: :interpreting COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::Responses::ResponseCodeInterpreterToolCall::status] + def self?.values: 
-> ::Array[OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::status] end end end diff --git a/sig/openai/models/responses/response_completed_event.rbs b/sig/openai/models/responses/response_completed_event.rbs index 4900852e..e3c62ea6 100644 --- a/sig/openai/models/responses/response_completed_event.rbs +++ b/sig/openai/models/responses/response_completed_event.rbs @@ -2,17 +2,30 @@ module OpenAI module Models module Responses type response_completed_event = - { response: OpenAI::Responses::Response, type: :"response.completed" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.completed" + } class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.completed" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.completed" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.completed" + } end end end diff --git a/sig/openai/models/responses/response_computer_tool_call.rbs b/sig/openai/models/responses/response_computer_tool_call.rbs index 52c82604..094df5fd 100644 --- a/sig/openai/models/responses/response_computer_tool_call.rbs +++ b/sig/openai/models/responses/response_computer_tool_call.rbs @@ -4,35 +4,44 @@ module OpenAI type response_computer_tool_call = { id: String, - action: OpenAI::Responses::ResponseComputerToolCall::action, + action: OpenAI::Models::Responses::ResponseComputerToolCall::action, call_id: String, pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Responses::ResponseComputerToolCall::status, - type: OpenAI::Responses::ResponseComputerToolCall::type_ + status: OpenAI::Models::Responses::ResponseComputerToolCall::status, + type: 
OpenAI::Models::Responses::ResponseComputerToolCall::type_ } class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor action: OpenAI::Responses::ResponseComputerToolCall::action + attr_accessor action: OpenAI::Models::Responses::ResponseComputerToolCall::action attr_accessor call_id: String attr_accessor pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck] - attr_accessor status: OpenAI::Responses::ResponseComputerToolCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseComputerToolCall::status - attr_accessor type: OpenAI::Responses::ResponseComputerToolCall::type_ + attr_accessor type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ def initialize: ( id: String, - action: OpenAI::Responses::ResponseComputerToolCall::action, + action: OpenAI::Models::Responses::ResponseComputerToolCall::action, call_id: String, pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], - status: OpenAI::Responses::ResponseComputerToolCall::status, - type: OpenAI::Responses::ResponseComputerToolCall::type_ + status: OpenAI::Models::Responses::ResponseComputerToolCall::status, + type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ ) -> void + def to_hash: -> { + id: String, + action: OpenAI::Models::Responses::ResponseComputerToolCall::action, + call_id: String, + pending_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCall::PendingSafetyCheck], + status: OpenAI::Models::Responses::ResponseComputerToolCall::status, + type: OpenAI::Models::Responses::ResponseComputerToolCall::type_ + } + type action = OpenAI::Responses::ResponseComputerToolCall::Action::Click | OpenAI::Responses::ResponseComputerToolCall::Action::DoubleClick @@ -49,14 +58,14 @@ module OpenAI type click = { - button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button, + button: 
OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, type: :click, x: Integer, y_: Integer } class Click < OpenAI::Internal::Type::BaseModel - attr_accessor button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button + attr_accessor button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button attr_accessor type: :click @@ -65,12 +74,19 @@ module OpenAI attr_accessor y_: Integer def initialize: ( - button: OpenAI::Responses::ResponseComputerToolCall::Action::Click::button, + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, x: Integer, y_: Integer, ?type: :click ) -> void + def to_hash: -> { + button: OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button, + type: :click, + x: Integer, + y_: Integer + } + type button = :left | :right | :wheel | :back | :forward module Button @@ -82,7 +98,7 @@ module OpenAI BACK: :back FORWARD: :forward - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Click::button] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::button] end end @@ -100,6 +116,8 @@ module OpenAI y_: Integer, ?type: :double_click ) -> void + + def to_hash: -> { type: :double_click, x: Integer, y_: Integer } end type drag = @@ -118,6 +136,11 @@ module OpenAI ?type: :drag ) -> void + def to_hash: -> { + path: ::Array[OpenAI::Responses::ResponseComputerToolCall::Action::Drag::Path], + type: :drag + } + type path = { x: Integer, y_: Integer } class Path < OpenAI::Internal::Type::BaseModel @@ -126,6 +149,8 @@ module OpenAI attr_accessor y_: Integer def initialize: (x: Integer, y_: Integer) -> void + + def to_hash: -> { x: Integer, y_: Integer } end end @@ -137,6 +162,8 @@ module OpenAI attr_accessor type: :keypress def initialize: (keys: ::Array[String], ?type: :keypress) -> void + + def to_hash: -> { keys: ::Array[String], type: :keypress } end type move = { type: :move, x: 
Integer, y_: Integer } @@ -149,6 +176,8 @@ module OpenAI attr_accessor y_: Integer def initialize: (x: Integer, y_: Integer, ?type: :move) -> void + + def to_hash: -> { type: :move, x: Integer, y_: Integer } end type screenshot = { type: :screenshot } @@ -157,6 +186,8 @@ module OpenAI attr_accessor type: :screenshot def initialize: (?type: :screenshot) -> void + + def to_hash: -> { type: :screenshot } end type scroll = @@ -186,6 +217,14 @@ module OpenAI y_: Integer, ?type: :scroll ) -> void + + def to_hash: -> { + scroll_x: Integer, + scroll_y: Integer, + type: :scroll, + x: Integer, + y_: Integer + } end type type_ = { text: String, type: :type } @@ -196,6 +235,8 @@ module OpenAI attr_accessor type: :type def initialize: (text: String, ?type: :type) -> void + + def to_hash: -> { text: String, type: :type } end type wait = { type: :wait } @@ -204,9 +245,11 @@ module OpenAI attr_accessor type: :wait def initialize: (?type: :wait) -> void + + def to_hash: -> { type: :wait } end - def self?.variants: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::action] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::action] end type pending_safety_check = @@ -220,6 +263,8 @@ module OpenAI attr_accessor message: String def initialize: (id: String, code: String, message: String) -> void + + def to_hash: -> { id: String, code: String, message: String } end type status = :in_progress | :completed | :incomplete @@ -231,7 +276,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::status] end type type_ = :computer_call @@ -241,7 +286,7 @@ module OpenAI COMPUTER_CALL: :computer_call - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCall::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCall::type_] end end end 
diff --git a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs index 4ce6f3a0..a21f2310 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_computer_tool_call_output_item.rbs @@ -8,7 +8,7 @@ module OpenAI output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, type: :computer_call_output, acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status } class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel @@ -26,21 +26,30 @@ module OpenAI ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] ) -> ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] - attr_reader status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status? 
def status=: ( - OpenAI::Responses::ResponseComputerToolCallOutputItem::status - ) -> OpenAI::Responses::ResponseComputerToolCallOutputItem::status + OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + ) -> OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status def initialize: ( id: String, call_id: String, output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, ?acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], - ?status: OpenAI::Responses::ResponseComputerToolCallOutputItem::status, + ?status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status, ?type: :computer_call_output ) -> void + def to_hash: -> { + id: String, + call_id: String, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + type: :computer_call_output, + acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck], + status: OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status + } + type acknowledged_safety_check = { id: String, code: String, message: String } @@ -52,6 +61,8 @@ module OpenAI attr_accessor message: String def initialize: (id: String, code: String, message: String) -> void + + def to_hash: -> { id: String, code: String, message: String } end type status = :in_progress | :completed | :incomplete @@ -63,7 +74,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseComputerToolCallOutputItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::status] end end end diff --git a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs index 5cea0d21..3f522d97 100644 --- a/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs +++ 
b/sig/openai/models/responses/response_computer_tool_call_output_screenshot.rbs @@ -20,6 +20,12 @@ module OpenAI ?image_url: String, ?type: :computer_screenshot ) -> void + + def to_hash: -> { + type: :computer_screenshot, + file_id: String, + image_url: String + } end end end diff --git a/sig/openai/models/responses/response_content_part_added_event.rbs b/sig/openai/models/responses/response_content_part_added_event.rbs index 7f1ecbc3..efdf1ffe 100644 --- a/sig/openai/models/responses/response_content_part_added_event.rbs +++ b/sig/openai/models/responses/response_content_part_added_event.rbs @@ -6,7 +6,8 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartAddedEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, + sequence_number: Integer, type: :"response.content_part.added" } @@ -17,7 +18,9 @@ module OpenAI attr_accessor output_index: Integer - attr_accessor part: OpenAI::Responses::ResponseContentPartAddedEvent::part + attr_accessor part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part + + attr_accessor sequence_number: Integer attr_accessor type: :"response.content_part.added" @@ -25,10 +28,20 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartAddedEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, + sequence_number: Integer, ?type: :"response.content_part.added" ) -> void + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseContentPartAddedEvent::part, + sequence_number: Integer, + type: :"response.content_part.added" + } + type part = OpenAI::Responses::ResponseOutputText | OpenAI::Responses::ResponseOutputRefusal @@ -36,7 +49,7 @@ module OpenAI module Part extend OpenAI::Internal::Type::Union - def self?.variants: -> 
::Array[OpenAI::Responses::ResponseContentPartAddedEvent::part] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseContentPartAddedEvent::part] end end end diff --git a/sig/openai/models/responses/response_content_part_done_event.rbs b/sig/openai/models/responses/response_content_part_done_event.rbs index 2cfd7195..53ea73b3 100644 --- a/sig/openai/models/responses/response_content_part_done_event.rbs +++ b/sig/openai/models/responses/response_content_part_done_event.rbs @@ -6,7 +6,8 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartDoneEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, + sequence_number: Integer, type: :"response.content_part.done" } @@ -17,7 +18,9 @@ module OpenAI attr_accessor output_index: Integer - attr_accessor part: OpenAI::Responses::ResponseContentPartDoneEvent::part + attr_accessor part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part + + attr_accessor sequence_number: Integer attr_accessor type: :"response.content_part.done" @@ -25,10 +28,20 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, - part: OpenAI::Responses::ResponseContentPartDoneEvent::part, + part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, + sequence_number: Integer, ?type: :"response.content_part.done" ) -> void + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + part: OpenAI::Models::Responses::ResponseContentPartDoneEvent::part, + sequence_number: Integer, + type: :"response.content_part.done" + } + type part = OpenAI::Responses::ResponseOutputText | OpenAI::Responses::ResponseOutputRefusal @@ -36,7 +49,7 @@ module OpenAI module Part extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::ResponseContentPartDoneEvent::part] + def self?.variants: -> 
::Array[OpenAI::Models::Responses::ResponseContentPartDoneEvent::part] end end end diff --git a/sig/openai/models/responses/response_create_params.rbs b/sig/openai/models/responses/response_create_params.rbs index cf306e43..3050697d 100644 --- a/sig/openai/models/responses/response_create_params.rbs +++ b/sig/openai/models/responses/response_create_params.rbs @@ -5,6 +5,7 @@ module OpenAI { input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + background: bool?, include: ::Array[OpenAI::Models::Responses::response_includable]?, instructions: String?, max_output_tokens: Integer?, @@ -32,6 +33,8 @@ module OpenAI attr_accessor model: OpenAI::Models::responses_model + attr_accessor background: bool? + attr_accessor include: ::Array[OpenAI::Models::Responses::response_includable]? attr_accessor instructions: String? @@ -81,6 +84,7 @@ module OpenAI def initialize: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, @@ -100,6 +104,29 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + input: OpenAI::Models::Responses::ResponseCreateParams::input, + model: OpenAI::Models::responses_model, + background: bool?, + include: ::Array[OpenAI::Models::Responses::response_includable]?, + instructions: String?, + max_output_tokens: Integer?, + metadata: OpenAI::Models::metadata?, + parallel_tool_calls: bool?, + previous_response_id: String?, + reasoning: OpenAI::Reasoning?, + service_tier: OpenAI::Models::Responses::ResponseCreateParams::service_tier?, + store: bool?, + temperature: Float?, + text: OpenAI::Responses::ResponseTextConfig, + tool_choice: OpenAI::Models::Responses::ResponseCreateParams::tool_choice, + tools: ::Array[OpenAI::Models::Responses::tool], + top_p: Float?, + truncation: 
OpenAI::Models::Responses::ResponseCreateParams::truncation?, + user: String, + request_options: OpenAI::RequestOptions + } + type input = String | OpenAI::Models::Responses::response_input module Input diff --git a/sig/openai/models/responses/response_created_event.rbs b/sig/openai/models/responses/response_created_event.rbs index e93fe5a7..1681e66e 100644 --- a/sig/openai/models/responses/response_created_event.rbs +++ b/sig/openai/models/responses/response_created_event.rbs @@ -2,17 +2,30 @@ module OpenAI module Models module Responses type response_created_event = - { response: OpenAI::Responses::Response, type: :"response.created" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.created" + } class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.created" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.created" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.created" + } end end end diff --git a/sig/openai/models/responses/response_delete_params.rbs b/sig/openai/models/responses/response_delete_params.rbs index fceca07d..15aa4a54 100644 --- a/sig/openai/models/responses/response_delete_params.rbs +++ b/sig/openai/models/responses/response_delete_params.rbs @@ -9,6 +9,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/responses/response_error.rbs b/sig/openai/models/responses/response_error.rbs index ee1b5f7a..a894688d 100644 --- a/sig/openai/models/responses/response_error.rbs +++ b/sig/openai/models/responses/response_error.rbs @@ -2,18 +2,26 @@ module OpenAI module Models 
module Responses type response_error = - { code: OpenAI::Responses::ResponseError::code, message: String } + { + code: OpenAI::Models::Responses::ResponseError::code, + message: String + } class ResponseError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::Responses::ResponseError::code + attr_accessor code: OpenAI::Models::Responses::ResponseError::code attr_accessor message: String def initialize: ( - code: OpenAI::Responses::ResponseError::code, + code: OpenAI::Models::Responses::ResponseError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::Responses::ResponseError::code, + message: String + } + type code = :server_error | :rate_limit_exceeded @@ -56,7 +64,7 @@ module OpenAI FAILED_TO_DOWNLOAD_IMAGE: :failed_to_download_image IMAGE_FILE_NOT_FOUND: :image_file_not_found - def self?.values: -> ::Array[OpenAI::Responses::ResponseError::code] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseError::code] end end end diff --git a/sig/openai/models/responses/response_error_event.rbs b/sig/openai/models/responses/response_error_event.rbs index ac79ae54..a7aa3f3f 100644 --- a/sig/openai/models/responses/response_error_event.rbs +++ b/sig/openai/models/responses/response_error_event.rbs @@ -2,7 +2,13 @@ module OpenAI module Models module Responses type response_error_event = - { code: String?, message: String, param: String?, type: :error } + { + code: String?, + message: String, + param: String?, + sequence_number: Integer, + type: :error + } class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel attr_accessor code: String? @@ -11,14 +17,25 @@ module OpenAI attr_accessor param: String? 
+ attr_accessor sequence_number: Integer + attr_accessor type: :error def initialize: ( code: String?, message: String, param: String?, + sequence_number: Integer, ?type: :error ) -> void + + def to_hash: -> { + code: String?, + message: String, + param: String?, + sequence_number: Integer, + type: :error + } end end end diff --git a/sig/openai/models/responses/response_failed_event.rbs b/sig/openai/models/responses/response_failed_event.rbs index fe0562a3..27befafe 100644 --- a/sig/openai/models/responses/response_failed_event.rbs +++ b/sig/openai/models/responses/response_failed_event.rbs @@ -2,17 +2,30 @@ module OpenAI module Models module Responses type response_failed_event = - { response: OpenAI::Responses::Response, type: :"response.failed" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.failed" + } class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.failed" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.failed" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.failed" + } end end end diff --git a/sig/openai/models/responses/response_file_search_call_completed_event.rbs b/sig/openai/models/responses/response_file_search_call_completed_event.rbs index dc0013fc..80a0bc8e 100644 --- a/sig/openai/models/responses/response_file_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_completed_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.file_search_call.completed" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.file_search_call.completed" def 
initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.file_search_call.completed" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.file_search_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs index 36f69c57..333b4e44 100644 --- a/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_in_progress_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.file_search_call.in_progress" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.file_search_call.in_progress" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.file_search_call.in_progress" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.file_search_call.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_file_search_call_searching_event.rbs b/sig/openai/models/responses/response_file_search_call_searching_event.rbs index f1994439..66149ea3 100644 --- a/sig/openai/models/responses/response_file_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_file_search_call_searching_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.file_search_call.searching" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.file_search_call.searching" def initialize: ( item_id: String, 
output_index: Integer, + sequence_number: Integer, ?type: :"response.file_search_call.searching" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.file_search_call.searching" + } end end end diff --git a/sig/openai/models/responses/response_file_search_tool_call.rbs b/sig/openai/models/responses/response_file_search_tool_call.rbs index d97daaa2..b83d2d56 100644 --- a/sig/openai/models/responses/response_file_search_tool_call.rbs +++ b/sig/openai/models/responses/response_file_search_tool_call.rbs @@ -5,7 +5,7 @@ module OpenAI { id: String, queries: ::Array[String], - status: OpenAI::Responses::ResponseFileSearchToolCall::status, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, type: :file_search_call, results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]? } @@ -15,7 +15,7 @@ module OpenAI attr_accessor queries: ::Array[String] - attr_accessor status: OpenAI::Responses::ResponseFileSearchToolCall::status + attr_accessor status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status attr_accessor type: :file_search_call @@ -24,11 +24,19 @@ module OpenAI def initialize: ( id: String, queries: ::Array[String], - status: OpenAI::Responses::ResponseFileSearchToolCall::status, + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, ?results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]?, ?type: :file_search_call ) -> void + def to_hash: -> { + id: String, + queries: ::Array[String], + status: OpenAI::Models::Responses::ResponseFileSearchToolCall::status, + type: :file_search_call, + results: ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result]? 
+ } + type status = :in_progress | :searching | :completed | :incomplete | :failed @@ -41,12 +49,12 @@ module OpenAI INCOMPLETE: :incomplete FAILED: :failed - def self?.values: -> ::Array[OpenAI::Responses::ResponseFileSearchToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::status] end type result = { - attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, file_id: String, filename: String, score: Float, @@ -54,7 +62,7 @@ module OpenAI } class Result < OpenAI::Internal::Type::BaseModel - attr_accessor attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]? attr_reader file_id: String? @@ -73,19 +81,27 @@ module OpenAI def text=: (String) -> String def initialize: ( - ?attributes: ::Hash[Symbol, OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, ?file_id: String, ?filename: String, ?score: Float, ?text: String ) -> void + def to_hash: -> { + attributes: ::Hash[Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute]?, + file_id: String, + filename: String, + score: Float, + text: String + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::ResponseFileSearchToolCall::Result::attribute] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::attribute] end end end diff --git a/sig/openai/models/responses/response_format_text_json_schema_config.rbs b/sig/openai/models/responses/response_format_text_json_schema_config.rbs index 
319fd52e..23800680 100644 --- a/sig/openai/models/responses/response_format_text_json_schema_config.rbs +++ b/sig/openai/models/responses/response_format_text_json_schema_config.rbs @@ -30,6 +30,14 @@ module OpenAI ?strict: bool?, ?type: :json_schema ) -> void + + def to_hash: -> { + name: String, + schema: ::Hash[Symbol, top], + type: :json_schema, + description: String, + strict: bool? + } end end end diff --git a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs index e4486520..9cc2eb09 100644 --- a/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_delta_event.rbs @@ -6,6 +6,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.function_call_arguments.delta" } @@ -16,14 +17,25 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.function_call_arguments.delta" def initialize: ( delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.function_call_arguments.delta" ) -> void + + def to_hash: -> { + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.function_call_arguments.delta" + } end end end diff --git a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs index 3e023f5c..acedda9d 100644 --- a/sig/openai/models/responses/response_function_call_arguments_done_event.rbs +++ b/sig/openai/models/responses/response_function_call_arguments_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.function_call_arguments.done" } @@ -16,14 +17,25 @@ 
module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.function_call_arguments.done" def initialize: ( arguments: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.function_call_arguments.done" ) -> void + + def to_hash: -> { + arguments: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.function_call_arguments.done" + } end end end diff --git a/sig/openai/models/responses/response_function_tool_call.rbs b/sig/openai/models/responses/response_function_tool_call.rbs index 47884c8a..4da59dc0 100644 --- a/sig/openai/models/responses/response_function_tool_call.rbs +++ b/sig/openai/models/responses/response_function_tool_call.rbs @@ -8,7 +8,7 @@ module OpenAI name: String, type: :function_call, id: String, - status: OpenAI::Responses::ResponseFunctionToolCall::status + status: OpenAI::Models::Responses::ResponseFunctionToolCall::status } class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel @@ -24,21 +24,30 @@ module OpenAI def id=: (String) -> String - attr_reader status: OpenAI::Responses::ResponseFunctionToolCall::status? + attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCall::status? 
def status=: ( - OpenAI::Responses::ResponseFunctionToolCall::status - ) -> OpenAI::Responses::ResponseFunctionToolCall::status + OpenAI::Models::Responses::ResponseFunctionToolCall::status + ) -> OpenAI::Models::Responses::ResponseFunctionToolCall::status def initialize: ( arguments: String, call_id: String, name: String, ?id: String, - ?status: OpenAI::Responses::ResponseFunctionToolCall::status, + ?status: OpenAI::Models::Responses::ResponseFunctionToolCall::status, ?type: :function_call ) -> void + def to_hash: -> { + arguments: String, + call_id: String, + name: String, + type: :function_call, + id: String, + status: OpenAI::Models::Responses::ResponseFunctionToolCall::status + } + type status = :in_progress | :completed | :incomplete module Status @@ -48,7 +57,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionToolCall::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCall::status] end end end diff --git a/sig/openai/models/responses/response_function_tool_call_item.rbs b/sig/openai/models/responses/response_function_tool_call_item.rbs index 81206561..38d76eb8 100644 --- a/sig/openai/models/responses/response_function_tool_call_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_item.rbs @@ -9,6 +9,8 @@ module OpenAI def id=: (String _) -> String def initialize: (id: String) -> void + + def to_hash: -> { id: String } end end end diff --git a/sig/openai/models/responses/response_function_tool_call_output_item.rbs b/sig/openai/models/responses/response_function_tool_call_output_item.rbs index 048189b6..e9a67d83 100644 --- a/sig/openai/models/responses/response_function_tool_call_output_item.rbs +++ b/sig/openai/models/responses/response_function_tool_call_output_item.rbs @@ -7,7 +7,7 @@ module OpenAI call_id: String, output: String, type: :function_call_output, - status: 
OpenAI::Responses::ResponseFunctionToolCallOutputItem::status + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status } class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel @@ -19,20 +19,28 @@ module OpenAI attr_accessor type: :function_call_output - attr_reader status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status? def status=: ( - OpenAI::Responses::ResponseFunctionToolCallOutputItem::status - ) -> OpenAI::Responses::ResponseFunctionToolCallOutputItem::status + OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status + ) -> OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status def initialize: ( id: String, call_id: String, output: String, - ?status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::status, + ?status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status, ?type: :function_call_output ) -> void + def to_hash: -> { + id: String, + call_id: String, + output: String, + type: :function_call_output, + status: OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status + } + type status = :in_progress | :completed | :incomplete module Status @@ -42,7 +50,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionToolCallOutputItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::status] end end end diff --git a/sig/openai/models/responses/response_function_web_search.rbs b/sig/openai/models/responses/response_function_web_search.rbs index 5efa6740..0aa3c5a6 100644 --- a/sig/openai/models/responses/response_function_web_search.rbs +++ b/sig/openai/models/responses/response_function_web_search.rbs @@ -4,23 +4,29 @@ module OpenAI type response_function_web_search = { id: String, - status: 
OpenAI::Responses::ResponseFunctionWebSearch::status, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, type: :web_search_call } class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor status: OpenAI::Responses::ResponseFunctionWebSearch::status + attr_accessor status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status attr_accessor type: :web_search_call def initialize: ( id: String, - status: OpenAI::Responses::ResponseFunctionWebSearch::status, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, ?type: :web_search_call ) -> void + def to_hash: -> { + id: String, + status: OpenAI::Models::Responses::ResponseFunctionWebSearch::status, + type: :web_search_call + } + type status = :in_progress | :searching | :completed | :failed module Status @@ -31,7 +37,7 @@ module OpenAI COMPLETED: :completed FAILED: :failed - def self?.values: -> ::Array[OpenAI::Responses::ResponseFunctionWebSearch::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseFunctionWebSearch::status] end end end diff --git a/sig/openai/models/responses/response_image_gen_call_completed_event.rbs b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs new file mode 100644 index 00000000..aadbdaa5 --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_completed_event.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_completed_event = + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.completed" + } + + class ResponseImageGenCallCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.image_generation_call.completed" + + def initialize: ( + item_id: String, + output_index: Integer, + 
sequence_number: Integer, + ?type: :"response.image_generation_call.completed" + ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.completed" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_image_gen_call_generating_event.rbs b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs new file mode 100644 index 00000000..a9e514e9 --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_generating_event.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_generating_event = + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.generating" + } + + class ResponseImageGenCallGeneratingEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.image_generation_call.generating" + + def initialize: ( + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.image_generation_call.generating" + ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.generating" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs b/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs new file mode 100644 index 00000000..70a36323 --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_in_progress_event.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_in_progress_event = + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.in_progress" + } + + class 
ResponseImageGenCallInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.image_generation_call.in_progress" + + def initialize: ( + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.image_generation_call.in_progress" + ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.in_progress" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs b/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs new file mode 100644 index 00000000..72ebce9d --- /dev/null +++ b/sig/openai/models/responses/response_image_gen_call_partial_image_event.rbs @@ -0,0 +1,47 @@ +module OpenAI + module Models + module Responses + type response_image_gen_call_partial_image_event = + { + item_id: String, + output_index: Integer, + :partial_image_b64 => String, + partial_image_index: Integer, + sequence_number: Integer, + type: :"response.image_generation_call.partial_image" + } + + class ResponseImageGenCallPartialImageEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor partial_image_b64: String + + attr_accessor partial_image_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.image_generation_call.partial_image" + + def initialize: ( + item_id: String, + output_index: Integer, + partial_image_b64: String, + partial_image_index: Integer, + sequence_number: Integer, + ?type: :"response.image_generation_call.partial_image" + ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + :partial_image_b64 => String, + partial_image_index: Integer, + sequence_number: Integer, + type: 
:"response.image_generation_call.partial_image" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_in_progress_event.rbs b/sig/openai/models/responses/response_in_progress_event.rbs index 4d9f9e2a..f3d877eb 100644 --- a/sig/openai/models/responses/response_in_progress_event.rbs +++ b/sig/openai/models/responses/response_in_progress_event.rbs @@ -2,17 +2,30 @@ module OpenAI module Models module Responses type response_in_progress_event = - { response: OpenAI::Responses::Response, type: :"response.in_progress" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.in_progress" + } class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.in_progress" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: :"response.in_progress" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_incomplete_event.rbs b/sig/openai/models/responses/response_incomplete_event.rbs index 5e527b2f..179657cd 100644 --- a/sig/openai/models/responses/response_incomplete_event.rbs +++ b/sig/openai/models/responses/response_incomplete_event.rbs @@ -2,17 +2,30 @@ module OpenAI module Models module Responses type response_incomplete_event = - { response: OpenAI::Responses::Response, type: :"response.incomplete" } + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.incomplete" + } class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel attr_accessor response: OpenAI::Responses::Response + attr_accessor sequence_number: Integer + attr_accessor type: :"response.incomplete" def initialize: ( response: OpenAI::Responses::Response, + sequence_number: Integer, ?type: 
:"response.incomplete" ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.incomplete" + } end end end diff --git a/sig/openai/models/responses/response_input_audio.rbs b/sig/openai/models/responses/response_input_audio.rbs index 542c1371..42b5a713 100644 --- a/sig/openai/models/responses/response_input_audio.rbs +++ b/sig/openai/models/responses/response_input_audio.rbs @@ -4,23 +4,29 @@ module OpenAI type response_input_audio = { data: String, - format_: OpenAI::Responses::ResponseInputAudio::format_, + format_: OpenAI::Models::Responses::ResponseInputAudio::format_, type: :input_audio } class ResponseInputAudio < OpenAI::Internal::Type::BaseModel attr_accessor data: String - attr_accessor format_: OpenAI::Responses::ResponseInputAudio::format_ + attr_accessor format_: OpenAI::Models::Responses::ResponseInputAudio::format_ attr_accessor type: :input_audio def initialize: ( data: String, - format_: OpenAI::Responses::ResponseInputAudio::format_, + format_: OpenAI::Models::Responses::ResponseInputAudio::format_, ?type: :input_audio ) -> void + def to_hash: -> { + data: String, + format_: OpenAI::Models::Responses::ResponseInputAudio::format_, + type: :input_audio + } + type format_ = :mp3 | :wav module Format @@ -29,7 +35,7 @@ module OpenAI MP3: :mp3 WAV: :wav - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputAudio::format_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputAudio::format_] end end end diff --git a/sig/openai/models/responses/response_input_file.rbs b/sig/openai/models/responses/response_input_file.rbs index c5060c70..bdfcd495 100644 --- a/sig/openai/models/responses/response_input_file.rbs +++ b/sig/openai/models/responses/response_input_file.rbs @@ -28,6 +28,13 @@ module OpenAI ?filename: String, ?type: :input_file ) -> void + + def to_hash: -> { + type: :input_file, + file_data: String, + file_id: String?, + filename: String + } end end end diff 
--git a/sig/openai/models/responses/response_input_image.rbs b/sig/openai/models/responses/response_input_image.rbs index ebc718c8..6b9cf49e 100644 --- a/sig/openai/models/responses/response_input_image.rbs +++ b/sig/openai/models/responses/response_input_image.rbs @@ -3,14 +3,14 @@ module OpenAI module Responses type response_input_image = { - detail: OpenAI::Responses::ResponseInputImage::detail, + detail: OpenAI::Models::Responses::ResponseInputImage::detail, type: :input_image, file_id: String?, image_url: String? } class ResponseInputImage < OpenAI::Internal::Type::BaseModel - attr_accessor detail: OpenAI::Responses::ResponseInputImage::detail + attr_accessor detail: OpenAI::Models::Responses::ResponseInputImage::detail attr_accessor type: :input_image @@ -19,12 +19,19 @@ module OpenAI attr_accessor image_url: String? def initialize: ( - detail: OpenAI::Responses::ResponseInputImage::detail, + detail: OpenAI::Models::Responses::ResponseInputImage::detail, ?file_id: String?, ?image_url: String?, ?type: :input_image ) -> void + def to_hash: -> { + detail: OpenAI::Models::Responses::ResponseInputImage::detail, + type: :input_image, + file_id: String?, + image_url: String? 
+ } + type detail = :low | :high | :auto module Detail @@ -34,7 +41,7 @@ module OpenAI HIGH: :high AUTO: :auto - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputImage::detail] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputImage::detail] end end end diff --git a/sig/openai/models/responses/response_input_item.rbs b/sig/openai/models/responses/response_input_item.rbs index f3cb1160..35bf8908 100644 --- a/sig/openai/models/responses/response_input_item.rbs +++ b/sig/openai/models/responses/response_input_item.rbs @@ -12,6 +12,14 @@ module OpenAI | OpenAI::Responses::ResponseFunctionToolCall | OpenAI::Responses::ResponseInputItem::FunctionCallOutput | OpenAI::Responses::ResponseReasoningItem + | OpenAI::Responses::ResponseInputItem::ImageGenerationCall + | OpenAI::Responses::ResponseCodeInterpreterToolCall + | OpenAI::Responses::ResponseInputItem::LocalShellCall + | OpenAI::Responses::ResponseInputItem::LocalShellCallOutput + | OpenAI::Responses::ResponseInputItem::McpListTools + | OpenAI::Responses::ResponseInputItem::McpApprovalRequest + | OpenAI::Responses::ResponseInputItem::McpApprovalResponse + | OpenAI::Responses::ResponseInputItem::McpCall | OpenAI::Responses::ResponseInputItem::ItemReference module ResponseInputItem @@ -20,35 +28,42 @@ module OpenAI type message = { content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputItem::Message::role, - status: OpenAI::Responses::ResponseInputItem::Message::status, - type: OpenAI::Responses::ResponseInputItem::Message::type_ + role: OpenAI::Models::Responses::ResponseInputItem::Message::role, + status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ } class Message < OpenAI::Internal::Type::BaseModel attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list - attr_accessor role: 
OpenAI::Responses::ResponseInputItem::Message::role + attr_accessor role: OpenAI::Models::Responses::ResponseInputItem::Message::role - attr_reader status: OpenAI::Responses::ResponseInputItem::Message::status? + attr_reader status: OpenAI::Models::Responses::ResponseInputItem::Message::status? def status=: ( - OpenAI::Responses::ResponseInputItem::Message::status - ) -> OpenAI::Responses::ResponseInputItem::Message::status + OpenAI::Models::Responses::ResponseInputItem::Message::status + ) -> OpenAI::Models::Responses::ResponseInputItem::Message::status - attr_reader type: OpenAI::Responses::ResponseInputItem::Message::type_? + attr_reader type: OpenAI::Models::Responses::ResponseInputItem::Message::type_? def type=: ( - OpenAI::Responses::ResponseInputItem::Message::type_ - ) -> OpenAI::Responses::ResponseInputItem::Message::type_ + OpenAI::Models::Responses::ResponseInputItem::Message::type_ + ) -> OpenAI::Models::Responses::ResponseInputItem::Message::type_ def initialize: ( content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputItem::Message::role, - ?status: OpenAI::Responses::ResponseInputItem::Message::status, - ?type: OpenAI::Responses::ResponseInputItem::Message::type_ + role: OpenAI::Models::Responses::ResponseInputItem::Message::role, + ?status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + ?type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ ) -> void + def to_hash: -> { + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputItem::Message::role, + status: OpenAI::Models::Responses::ResponseInputItem::Message::status, + type: OpenAI::Models::Responses::ResponseInputItem::Message::type_ + } + type role = :user | :system | :developer module Role @@ -58,7 +73,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::role] + 
def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::role] end type status = :in_progress | :completed | :incomplete @@ -70,7 +85,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::status] end type type_ = :message @@ -80,7 +95,7 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::Message::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::Message::type_] end end @@ -91,7 +106,7 @@ module OpenAI type: :computer_call_output, id: String?, acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, - status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status? + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? } class ComputerCallOutput < OpenAI::Internal::Type::BaseModel @@ -105,17 +120,26 @@ module OpenAI attr_accessor acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]? - attr_accessor status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status? + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? 
def initialize: ( call_id: String, output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, ?id: String?, ?acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, - ?status: OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status?, + ?status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status?, ?type: :computer_call_output ) -> void + def to_hash: -> { + call_id: String, + output: OpenAI::Responses::ResponseComputerToolCallOutputScreenshot, + type: :computer_call_output, + id: String?, + acknowledged_safety_checks: ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck]?, + status: OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status? + } + type acknowledged_safety_check = { id: String, code: String?, message: String? } @@ -131,6 +155,8 @@ module OpenAI ?code: String?, ?message: String? ) -> void + + def to_hash: -> { id: String, code: String?, message: String? } end type status = :in_progress | :completed | :incomplete @@ -142,7 +168,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ComputerCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::status] end end @@ -152,7 +178,7 @@ module OpenAI output: String, type: :function_call_output, id: String?, - status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status? + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? } class FunctionCallOutput < OpenAI::Internal::Type::BaseModel @@ -164,16 +190,160 @@ module OpenAI attr_accessor id: String? - attr_accessor status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status? + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? 
def initialize: ( call_id: String, output: String, ?id: String?, - ?status: OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status?, + ?status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status?, ?type: :function_call_output ) -> void + def to_hash: -> { + call_id: String, + output: String, + type: :function_call_output, + id: String?, + status: OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status? + } + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::status] + end + end + + type image_generation_call = + { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status, + type: :image_generation_call + } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor result: String? 
+ + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status + + attr_accessor type: :image_generation_call + + def initialize: ( + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status, + ?type: :image_generation_call + ) -> void + + def to_hash: -> { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status, + type: :image_generation_call + } + + type status = :in_progress | :completed | :generating | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + GENERATING: :generating + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ImageGenerationCall::status] + end + end + + type local_shell_call = + { + id: String, + action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status, + type: :local_shell_call + } + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action + + attr_accessor call_id: String + + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status + + attr_accessor type: :local_shell_call + + def initialize: ( + id: String, + action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status, + ?type: :local_shell_call + ) -> void + + def to_hash: -> { + id: String, + action: OpenAI::Responses::ResponseInputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status, + type: :local_shell_call + } + + type action = + { + command: ::Array[String], + env: ::Hash[Symbol, 
String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + + class Action < OpenAI::Internal::Type::BaseModel + attr_accessor command: ::Array[String] + + attr_accessor env: ::Hash[Symbol, String] + + attr_accessor type: :exec + + attr_accessor timeout_ms: Integer? + + attr_accessor user: String? + + attr_accessor working_directory: String? + + def initialize: ( + command: ::Array[String], + env: ::Hash[Symbol, String], + ?timeout_ms: Integer?, + ?user: String?, + ?working_directory: String?, + ?type: :exec + ) -> void + + def to_hash: -> { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + end + type status = :in_progress | :completed | :incomplete module Status @@ -183,26 +353,265 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::FunctionCallOutput::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::LocalShellCall::status] end end + type local_shell_call_output = + { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status? + } + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor output: String + + attr_accessor type: :local_shell_call_output + + attr_accessor status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status? + + def initialize: ( + id: String, + output: String, + ?status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status?, + ?type: :local_shell_call_output + ) -> void + + def to_hash: -> { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status? 
+ } + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::LocalShellCallOutput::status] + end + end + + type mcp_list_tools = + { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + class McpListTools < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor server_label: String + + attr_accessor tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool] + + attr_accessor type: :mcp_list_tools + + attr_accessor error: String? + + def initialize: ( + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool], + ?error: String?, + ?type: :mcp_list_tools + ) -> void + + def to_hash: -> { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseInputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + type tool = + { + input_schema: top, + name: String, + annotations: top?, + description: String? + } + + class Tool < OpenAI::Internal::Type::BaseModel + attr_accessor input_schema: top + + attr_accessor name: String + + attr_accessor annotations: top? + + attr_accessor description: String? + + def initialize: ( + input_schema: top, + name: String, + ?annotations: top?, + ?description: String? + ) -> void + + def to_hash: -> { + input_schema: top, + name: String, + annotations: top?, + description: String? 
+ } + end + end + + type mcp_approval_request = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_approval_request + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?type: :mcp_approval_request + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + end + + type mcp_approval_response = + { + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + id: String?, + reason: String? + } + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + attr_accessor approval_request_id: String + + attr_accessor approve: bool + + attr_accessor type: :mcp_approval_response + + attr_accessor id: String? + + attr_accessor reason: String? + + def initialize: ( + approval_request_id: String, + approve: bool, + ?id: String?, + ?reason: String?, + ?type: :mcp_approval_response + ) -> void + + def to_hash: -> { + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + id: String?, + reason: String? + } + end + + type mcp_call = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + + class McpCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_call + + attr_accessor error: String? + + attr_accessor output: String? 
+ + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?error: String?, + ?output: String?, + ?type: :mcp_call + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + end + type item_reference = { id: String, - type: OpenAI::Responses::ResponseInputItem::ItemReference::type_? + type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? } class ItemReference < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor type: OpenAI::Responses::ResponseInputItem::ItemReference::type_? + attr_accessor type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? def initialize: ( id: String, - ?type: OpenAI::Responses::ResponseInputItem::ItemReference::type_? + ?type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? ) -> void + def to_hash: -> { + id: String, + type: OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_? 
+ } + type type_ = :item_reference module Type @@ -210,7 +619,7 @@ module OpenAI ITEM_REFERENCE: :item_reference - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputItem::ItemReference::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputItem::ItemReference::type_] end end diff --git a/sig/openai/models/responses/response_input_message_item.rbs b/sig/openai/models/responses/response_input_message_item.rbs index f9cfd8b8..ff6874a7 100644 --- a/sig/openai/models/responses/response_input_message_item.rbs +++ b/sig/openai/models/responses/response_input_message_item.rbs @@ -5,9 +5,9 @@ module OpenAI { id: String, content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputMessageItem::role, - status: OpenAI::Responses::ResponseInputMessageItem::status, - type: OpenAI::Responses::ResponseInputMessageItem::type_ + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ } class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel @@ -15,28 +15,36 @@ module OpenAI attr_accessor content: OpenAI::Models::Responses::response_input_message_content_list - attr_accessor role: OpenAI::Responses::ResponseInputMessageItem::role + attr_accessor role: OpenAI::Models::Responses::ResponseInputMessageItem::role - attr_reader status: OpenAI::Responses::ResponseInputMessageItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseInputMessageItem::status? def status=: ( - OpenAI::Responses::ResponseInputMessageItem::status - ) -> OpenAI::Responses::ResponseInputMessageItem::status + OpenAI::Models::Responses::ResponseInputMessageItem::status + ) -> OpenAI::Models::Responses::ResponseInputMessageItem::status - attr_reader type: OpenAI::Responses::ResponseInputMessageItem::type_? 
+ attr_reader type: OpenAI::Models::Responses::ResponseInputMessageItem::type_? def type=: ( - OpenAI::Responses::ResponseInputMessageItem::type_ - ) -> OpenAI::Responses::ResponseInputMessageItem::type_ + OpenAI::Models::Responses::ResponseInputMessageItem::type_ + ) -> OpenAI::Models::Responses::ResponseInputMessageItem::type_ def initialize: ( id: String, content: OpenAI::Models::Responses::response_input_message_content_list, - role: OpenAI::Responses::ResponseInputMessageItem::role, - ?status: OpenAI::Responses::ResponseInputMessageItem::status, - ?type: OpenAI::Responses::ResponseInputMessageItem::type_ + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + ?status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + ?type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ ) -> void + def to_hash: -> { + id: String, + content: OpenAI::Models::Responses::response_input_message_content_list, + role: OpenAI::Models::Responses::ResponseInputMessageItem::role, + status: OpenAI::Models::Responses::ResponseInputMessageItem::status, + type: OpenAI::Models::Responses::ResponseInputMessageItem::type_ + } + type role = :user | :system | :developer module Role @@ -46,7 +54,7 @@ module OpenAI SYSTEM: :system DEVELOPER: :developer - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::role] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::role] end type status = :in_progress | :completed | :incomplete @@ -58,7 +66,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseInputMessageItem::status] end type type_ = :message @@ -68,7 +76,7 @@ module OpenAI MESSAGE: :message - def self?.values: -> ::Array[OpenAI::Responses::ResponseInputMessageItem::type_] + def self?.values: -> 
::Array[OpenAI::Models::Responses::ResponseInputMessageItem::type_] end end end diff --git a/sig/openai/models/responses/response_input_text.rbs b/sig/openai/models/responses/response_input_text.rbs index e60a488e..76771a1f 100644 --- a/sig/openai/models/responses/response_input_text.rbs +++ b/sig/openai/models/responses/response_input_text.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :input_text def initialize: (text: String, ?type: :input_text) -> void + + def to_hash: -> { text: String, type: :input_text } end end end diff --git a/sig/openai/models/responses/response_item.rbs b/sig/openai/models/responses/response_item.rbs index ab5eaa4c..f8f79e06 100644 --- a/sig/openai/models/responses/response_item.rbs +++ b/sig/openai/models/responses/response_item.rbs @@ -10,10 +10,388 @@ module OpenAI | OpenAI::Responses::ResponseFunctionWebSearch | OpenAI::Responses::ResponseFunctionToolCallItem | OpenAI::Responses::ResponseFunctionToolCallOutputItem + | OpenAI::Responses::ResponseItem::ImageGenerationCall + | OpenAI::Responses::ResponseCodeInterpreterToolCall + | OpenAI::Responses::ResponseItem::LocalShellCall + | OpenAI::Responses::ResponseItem::LocalShellCallOutput + | OpenAI::Responses::ResponseItem::McpListTools + | OpenAI::Responses::ResponseItem::McpApprovalRequest + | OpenAI::Responses::ResponseItem::McpApprovalResponse + | OpenAI::Responses::ResponseItem::McpCall module ResponseItem extend OpenAI::Internal::Type::Union + type image_generation_call = + { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status, + type: :image_generation_call + } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor result: String? 
+ + attr_accessor status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status + + attr_accessor type: :image_generation_call + + def initialize: ( + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status, + ?type: :image_generation_call + ) -> void + + def to_hash: -> { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status, + type: :image_generation_call + } + + type status = :in_progress | :completed | :generating | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + GENERATING: :generating + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseItem::ImageGenerationCall::status] + end + end + + type local_shell_call = + { + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status, + type: :local_shell_call + } + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor action: OpenAI::Responses::ResponseItem::LocalShellCall::Action + + attr_accessor call_id: String + + attr_accessor status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status + + attr_accessor type: :local_shell_call + + def initialize: ( + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status, + ?type: :local_shell_call + ) -> void + + def to_hash: -> { + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCall::status, + type: :local_shell_call + } + + type action = + { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: 
String?, + working_directory: String? + } + + class Action < OpenAI::Internal::Type::BaseModel + attr_accessor command: ::Array[String] + + attr_accessor env: ::Hash[Symbol, String] + + attr_accessor type: :exec + + attr_accessor timeout_ms: Integer? + + attr_accessor user: String? + + attr_accessor working_directory: String? + + def initialize: ( + command: ::Array[String], + env: ::Hash[Symbol, String], + ?timeout_ms: Integer?, + ?user: String?, + ?working_directory: String?, + ?type: :exec + ) -> void + + def to_hash: -> { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + end + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseItem::LocalShellCall::status] + end + end + + type local_shell_call_output = + { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status? + } + + class LocalShellCallOutput < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor output: String + + attr_accessor type: :local_shell_call_output + + attr_accessor status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status? + + def initialize: ( + id: String, + output: String, + ?status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status?, + ?type: :local_shell_call_output + ) -> void + + def to_hash: -> { + id: String, + output: String, + type: :local_shell_call_output, + status: OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status? 
+ } + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseItem::LocalShellCallOutput::status] + end + end + + type mcp_list_tools = + { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + class McpListTools < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor server_label: String + + attr_accessor tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool] + + attr_accessor type: :mcp_list_tools + + attr_accessor error: String? + + def initialize: ( + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + ?error: String?, + ?type: :mcp_list_tools + ) -> void + + def to_hash: -> { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + type tool = + { + input_schema: top, + name: String, + annotations: top?, + description: String? + } + + class Tool < OpenAI::Internal::Type::BaseModel + attr_accessor input_schema: top + + attr_accessor name: String + + attr_accessor annotations: top? + + attr_accessor description: String? + + def initialize: ( + input_schema: top, + name: String, + ?annotations: top?, + ?description: String? + ) -> void + + def to_hash: -> { + input_schema: top, + name: String, + annotations: top?, + description: String? 
+ } + end + end + + type mcp_approval_request = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_approval_request + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?type: :mcp_approval_request + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + end + + type mcp_approval_response = + { + id: String, + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + reason: String? + } + + class McpApprovalResponse < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor approval_request_id: String + + attr_accessor approve: bool + + attr_accessor type: :mcp_approval_response + + attr_accessor reason: String? + + def initialize: ( + id: String, + approval_request_id: String, + approve: bool, + ?reason: String?, + ?type: :mcp_approval_response + ) -> void + + def to_hash: -> { + id: String, + approval_request_id: String, + approve: bool, + type: :mcp_approval_response, + reason: String? + } + end + + type mcp_call = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + + class McpCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_call + + attr_accessor error: String? + + attr_accessor output: String? 
+ + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?error: String?, + ?output: String?, + ?type: :mcp_call + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + end + def self?.variants: -> ::Array[OpenAI::Models::Responses::response_item] end end diff --git a/sig/openai/models/responses/response_item_list.rbs b/sig/openai/models/responses/response_item_list.rbs index e7390def..7614060c 100644 --- a/sig/openai/models/responses/response_item_list.rbs +++ b/sig/openai/models/responses/response_item_list.rbs @@ -30,6 +30,14 @@ module OpenAI last_id: String, ?object: :list ) -> void + + def to_hash: -> { + data: ::Array[OpenAI::Models::Responses::response_item], + first_id: String, + has_more: bool, + last_id: String, + object: :list + } end end end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs new file mode 100644 index 00000000..bb94cc20 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_arguments_delta_event.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_arguments_delta_event = + { + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.arguments_delta" + } + + class ResponseMcpCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor delta: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_call.arguments_delta" + + def initialize: ( + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.mcp_call.arguments_delta" + ) -> void + + def to_hash: -> { + delta: top, + item_id: String, + output_index: 
Integer, + sequence_number: Integer, + type: :"response.mcp_call.arguments_delta" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs new file mode 100644 index 00000000..f0a16dcf --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_arguments_done_event.rbs @@ -0,0 +1,42 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_arguments_done_event = + { + arguments: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.arguments_done" + } + + class ResponseMcpCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor arguments: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_call.arguments_done" + + def initialize: ( + arguments: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.mcp_call.arguments_done" + ) -> void + + def to_hash: -> { + arguments: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.arguments_done" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_completed_event.rbs b/sig/openai/models/responses/response_mcp_call_completed_event.rbs new file mode 100644 index 00000000..336d5eed --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_completed_event.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_completed_event = + { sequence_number: Integer, type: :"response.mcp_call.completed" } + + class ResponseMcpCallCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_call.completed" + + def initialize: ( + sequence_number: Integer, + ?type: 
:"response.mcp_call.completed" + ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_call.completed" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_failed_event.rbs b/sig/openai/models/responses/response_mcp_call_failed_event.rbs new file mode 100644 index 00000000..9d643dbc --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_failed_event.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_failed_event = + { sequence_number: Integer, type: :"response.mcp_call.failed" } + + class ResponseMcpCallFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_call.failed" + + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_call.failed" + ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_call.failed" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs new file mode 100644 index 00000000..7dc8afd2 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_call_in_progress_event.rbs @@ -0,0 +1,37 @@ +module OpenAI + module Models + module Responses + type response_mcp_call_in_progress_event = + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.mcp_call.in_progress" + } + + class ResponseMcpCallInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_call.in_progress" + + def initialize: ( + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.mcp_call.in_progress" + ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: 
:"response.mcp_call.in_progress" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs new file mode 100644 index 00000000..af17675a --- /dev/null +++ b/sig/openai/models/responses/response_mcp_list_tools_completed_event.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Models + module Responses + type response_mcp_list_tools_completed_event = + { sequence_number: Integer, type: :"response.mcp_list_tools.completed" } + + class ResponseMcpListToolsCompletedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_list_tools.completed" + + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_list_tools.completed" + ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_list_tools.completed" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs new file mode 100644 index 00000000..fab64580 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_list_tools_failed_event.rbs @@ -0,0 +1,24 @@ +module OpenAI + module Models + module Responses + type response_mcp_list_tools_failed_event = + { sequence_number: Integer, type: :"response.mcp_list_tools.failed" } + + class ResponseMcpListToolsFailedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_list_tools.failed" + + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_list_tools.failed" + ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_list_tools.failed" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs 
new file mode 100644 index 00000000..72e57c18 --- /dev/null +++ b/sig/openai/models/responses/response_mcp_list_tools_in_progress_event.rbs @@ -0,0 +1,27 @@ +module OpenAI + module Models + module Responses + type response_mcp_list_tools_in_progress_event = + { + sequence_number: Integer, + type: :"response.mcp_list_tools.in_progress" + } + + class ResponseMcpListToolsInProgressEvent < OpenAI::Internal::Type::BaseModel + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.mcp_list_tools.in_progress" + + def initialize: ( + sequence_number: Integer, + ?type: :"response.mcp_list_tools.in_progress" + ) -> void + + def to_hash: -> { + sequence_number: Integer, + type: :"response.mcp_list_tools.in_progress" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_output_audio.rbs b/sig/openai/models/responses/response_output_audio.rbs index a2c7f16e..0a8d8e3a 100644 --- a/sig/openai/models/responses/response_output_audio.rbs +++ b/sig/openai/models/responses/response_output_audio.rbs @@ -16,6 +16,12 @@ module OpenAI transcript: String, ?type: :output_audio ) -> void + + def to_hash: -> { + data: String, + transcript: String, + type: :output_audio + } end end end diff --git a/sig/openai/models/responses/response_output_item.rbs b/sig/openai/models/responses/response_output_item.rbs index cee5b2c7..dc0254bd 100644 --- a/sig/openai/models/responses/response_output_item.rbs +++ b/sig/openai/models/responses/response_output_item.rbs @@ -8,10 +8,305 @@ module OpenAI | OpenAI::Responses::ResponseFunctionWebSearch | OpenAI::Responses::ResponseComputerToolCall | OpenAI::Responses::ResponseReasoningItem + | OpenAI::Responses::ResponseOutputItem::ImageGenerationCall + | OpenAI::Responses::ResponseCodeInterpreterToolCall + | OpenAI::Responses::ResponseOutputItem::LocalShellCall + | OpenAI::Responses::ResponseOutputItem::McpCall + | OpenAI::Responses::ResponseOutputItem::McpListTools + | 
OpenAI::Responses::ResponseOutputItem::McpApprovalRequest module ResponseOutputItem extend OpenAI::Internal::Type::Union + type image_generation_call = + { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status, + type: :image_generation_call + } + + class ImageGenerationCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor result: String? + + attr_accessor status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status + + attr_accessor type: :image_generation_call + + def initialize: ( + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status, + ?type: :image_generation_call + ) -> void + + def to_hash: -> { + id: String, + result: String?, + status: OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status, + type: :image_generation_call + } + + type status = :in_progress | :completed | :generating | :failed + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + GENERATING: :generating + FAILED: :failed + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputItem::ImageGenerationCall::status] + end + end + + type local_shell_call = + { + id: String, + action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status, + type: :local_shell_call + } + + class LocalShellCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action + + attr_accessor call_id: String + + attr_accessor status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status + + attr_accessor type: :local_shell_call + + def initialize: ( + id: String, + action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + 
call_id: String, + status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status, + ?type: :local_shell_call + ) -> void + + def to_hash: -> { + id: String, + action: OpenAI::Responses::ResponseOutputItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status, + type: :local_shell_call + } + + type action = + { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + + class Action < OpenAI::Internal::Type::BaseModel + attr_accessor command: ::Array[String] + + attr_accessor env: ::Hash[Symbol, String] + + attr_accessor type: :exec + + attr_accessor timeout_ms: Integer? + + attr_accessor user: String? + + attr_accessor working_directory: String? + + def initialize: ( + command: ::Array[String], + env: ::Hash[Symbol, String], + ?timeout_ms: Integer?, + ?user: String?, + ?working_directory: String?, + ?type: :exec + ) -> void + + def to_hash: -> { + command: ::Array[String], + env: ::Hash[Symbol, String], + type: :exec, + timeout_ms: Integer?, + user: String?, + working_directory: String? + } + end + + type status = :in_progress | :completed | :incomplete + + module Status + extend OpenAI::Internal::Type::Enum + + IN_PROGRESS: :in_progress + COMPLETED: :completed + INCOMPLETE: :incomplete + + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputItem::LocalShellCall::status] + end + end + + type mcp_call = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + + class McpCall < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_call + + attr_accessor error: String? + + attr_accessor output: String? 
+ + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?error: String?, + ?output: String?, + ?type: :mcp_call + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_call, + error: String?, + output: String? + } + end + + type mcp_list_tools = + { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + class McpListTools < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor server_label: String + + attr_accessor tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool] + + attr_accessor type: :mcp_list_tools + + attr_accessor error: String? + + def initialize: ( + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool], + ?error: String?, + ?type: :mcp_list_tools + ) -> void + + def to_hash: -> { + id: String, + server_label: String, + tools: ::Array[OpenAI::Responses::ResponseOutputItem::McpListTools::Tool], + type: :mcp_list_tools, + error: String? + } + + type tool = + { + input_schema: top, + name: String, + annotations: top?, + description: String? + } + + class Tool < OpenAI::Internal::Type::BaseModel + attr_accessor input_schema: top + + attr_accessor name: String + + attr_accessor annotations: top? + + attr_accessor description: String? + + def initialize: ( + input_schema: top, + name: String, + ?annotations: top?, + ?description: String? + ) -> void + + def to_hash: -> { + input_schema: top, + name: String, + annotations: top?, + description: String? 
+ } + end + end + + type mcp_approval_request = + { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + + class McpApprovalRequest < OpenAI::Internal::Type::BaseModel + attr_accessor id: String + + attr_accessor arguments: String + + attr_accessor name: String + + attr_accessor server_label: String + + attr_accessor type: :mcp_approval_request + + def initialize: ( + id: String, + arguments: String, + name: String, + server_label: String, + ?type: :mcp_approval_request + ) -> void + + def to_hash: -> { + id: String, + arguments: String, + name: String, + server_label: String, + type: :mcp_approval_request + } + end + def self?.variants: -> ::Array[OpenAI::Models::Responses::response_output_item] end end diff --git a/sig/openai/models/responses/response_output_item_added_event.rbs b/sig/openai/models/responses/response_output_item_added_event.rbs index 03ac25c8..25781321 100644 --- a/sig/openai/models/responses/response_output_item_added_event.rbs +++ b/sig/openai/models/responses/response_output_item_added_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, type: :"response.output_item.added" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_item.added" def initialize: ( item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_item.added" ) -> void + + def to_hash: -> { + item: OpenAI::Models::Responses::response_output_item, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_item.added" + } end end end diff --git a/sig/openai/models/responses/response_output_item_done_event.rbs b/sig/openai/models/responses/response_output_item_done_event.rbs index 506bcabd..3012b2ab 100644 --- 
a/sig/openai/models/responses/response_output_item_done_event.rbs +++ b/sig/openai/models/responses/response_output_item_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, type: :"response.output_item.done" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_item.done" def initialize: ( item: OpenAI::Models::Responses::response_output_item, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_item.done" ) -> void + + def to_hash: -> { + item: OpenAI::Models::Responses::response_output_item, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_item.done" + } end end end diff --git a/sig/openai/models/responses/response_output_message.rbs b/sig/openai/models/responses/response_output_message.rbs index 47827460..223256b8 100644 --- a/sig/openai/models/responses/response_output_message.rbs +++ b/sig/openai/models/responses/response_output_message.rbs @@ -4,31 +4,39 @@ module OpenAI type response_output_message = { id: String, - content: ::Array[OpenAI::Responses::ResponseOutputMessage::content], + content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], role: :assistant, - status: OpenAI::Responses::ResponseOutputMessage::status, + status: OpenAI::Models::Responses::ResponseOutputMessage::status, type: :message } class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel attr_accessor id: String - attr_accessor content: ::Array[OpenAI::Responses::ResponseOutputMessage::content] + attr_accessor content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content] attr_accessor role: :assistant - attr_accessor status: OpenAI::Responses::ResponseOutputMessage::status + attr_accessor status: OpenAI::Models::Responses::ResponseOutputMessage::status attr_accessor type: :message def initialize: ( 
id: String, - content: ::Array[OpenAI::Responses::ResponseOutputMessage::content], - status: OpenAI::Responses::ResponseOutputMessage::status, + content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], + status: OpenAI::Models::Responses::ResponseOutputMessage::status, ?role: :assistant, ?type: :message ) -> void + def to_hash: -> { + id: String, + content: ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content], + role: :assistant, + status: OpenAI::Models::Responses::ResponseOutputMessage::status, + type: :message + } + type content = OpenAI::Responses::ResponseOutputText | OpenAI::Responses::ResponseOutputRefusal @@ -36,7 +44,7 @@ module OpenAI module Content extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::Responses::ResponseOutputMessage::content] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseOutputMessage::content] end type status = :in_progress | :completed | :incomplete @@ -48,7 +56,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseOutputMessage::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseOutputMessage::status] end end end diff --git a/sig/openai/models/responses/response_output_refusal.rbs b/sig/openai/models/responses/response_output_refusal.rbs index 14de45aa..e2347cc1 100644 --- a/sig/openai/models/responses/response_output_refusal.rbs +++ b/sig/openai/models/responses/response_output_refusal.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :refusal def initialize: (refusal: String, ?type: :refusal) -> void + + def to_hash: -> { refusal: String, type: :refusal } end end end diff --git a/sig/openai/models/responses/response_output_text.rbs b/sig/openai/models/responses/response_output_text.rbs index 368b2b07..d374deb4 100644 --- a/sig/openai/models/responses/response_output_text.rbs +++ b/sig/openai/models/responses/response_output_text.rbs @@ -3,24 +3,30 @@ module 
OpenAI module Responses type response_output_text = { - annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation], + annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, type: :output_text } class ResponseOutputText < OpenAI::Internal::Type::BaseModel - attr_accessor annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation] + attr_accessor annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation] attr_accessor text: String attr_accessor type: :output_text def initialize: ( - annotations: ::Array[OpenAI::Responses::ResponseOutputText::annotation], + annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], text: String, ?type: :output_text ) -> void + def to_hash: -> { + annotations: ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation], + text: String, + type: :output_text + } + type annotation = OpenAI::Responses::ResponseOutputText::Annotation::FileCitation | OpenAI::Responses::ResponseOutputText::Annotation::URLCitation @@ -44,6 +50,12 @@ module OpenAI index: Integer, ?type: :file_citation ) -> void + + def to_hash: -> { + file_id: String, + index: Integer, + type: :file_citation + } end type url_citation = @@ -73,6 +85,14 @@ module OpenAI url: String, ?type: :url_citation ) -> void + + def to_hash: -> { + end_index: Integer, + start_index: Integer, + title: String, + type: :url_citation, + url: String + } end type file_path = { file_id: String, index: Integer, type: :file_path } @@ -89,9 +109,15 @@ module OpenAI index: Integer, ?type: :file_path ) -> void + + def to_hash: -> { + file_id: String, + index: Integer, + type: :file_path + } end - def self?.variants: -> ::Array[OpenAI::Responses::ResponseOutputText::annotation] + def self?.variants: -> ::Array[OpenAI::Models::Responses::ResponseOutputText::annotation] end end end diff --git a/sig/openai/models/responses/response_output_text_annotation_added_event.rbs 
b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs new file mode 100644 index 00000000..499b4556 --- /dev/null +++ b/sig/openai/models/responses/response_output_text_annotation_added_event.rbs @@ -0,0 +1,52 @@ +module OpenAI + module Models + module Responses + type response_output_text_annotation_added_event = + { + annotation: top, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_text_annotation.added" + } + + class ResponseOutputTextAnnotationAddedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor annotation: top + + attr_accessor annotation_index: Integer + + attr_accessor content_index: Integer + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.output_text_annotation.added" + + def initialize: ( + annotation: top, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.output_text_annotation.added" + ) -> void + + def to_hash: -> { + annotation: top, + annotation_index: Integer, + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_text_annotation.added" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_queued_event.rbs b/sig/openai/models/responses/response_queued_event.rbs new file mode 100644 index 00000000..4db7787a --- /dev/null +++ b/sig/openai/models/responses/response_queued_event.rbs @@ -0,0 +1,32 @@ +module OpenAI + module Models + module Responses + type response_queued_event = + { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.queued" + } + + class ResponseQueuedEvent < OpenAI::Internal::Type::BaseModel + attr_accessor response: OpenAI::Responses::Response + + attr_accessor 
sequence_number: Integer + + attr_accessor type: :"response.queued" + + def initialize: ( + response: OpenAI::Responses::Response, + sequence_number: Integer, + ?type: :"response.queued" + ) -> void + + def to_hash: -> { + response: OpenAI::Responses::Response, + sequence_number: Integer, + type: :"response.queued" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_delta_event.rbs b/sig/openai/models/responses/response_reasoning_delta_event.rbs new file mode 100644 index 00000000..a56121bc --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_delta_event.rbs @@ -0,0 +1,47 @@ +module OpenAI + module Models + module Responses + type response_reasoning_delta_event = + { + content_index: Integer, + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.reasoning.delta" + } + + class ResponseReasoningDeltaEvent < OpenAI::Internal::Type::BaseModel + attr_accessor content_index: Integer + + attr_accessor delta: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor type: :"response.reasoning.delta" + + def initialize: ( + content_index: Integer, + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + ?type: :"response.reasoning.delta" + ) -> void + + def to_hash: -> { + content_index: Integer, + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.reasoning.delta" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_done_event.rbs b/sig/openai/models/responses/response_reasoning_done_event.rbs new file mode 100644 index 00000000..0123cfcf --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_done_event.rbs @@ -0,0 +1,47 @@ +module OpenAI + module Models + module Responses + type response_reasoning_done_event = + { + content_index: Integer, + item_id: String, + 
output_index: Integer, + sequence_number: Integer, + text: String, + type: :"response.reasoning.done" + } + + class ResponseReasoningDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor content_index: Integer + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor text: String + + attr_accessor type: :"response.reasoning.done" + + def initialize: ( + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + text: String, + ?type: :"response.reasoning.done" + ) -> void + + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + text: String, + type: :"response.reasoning.done" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_item.rbs b/sig/openai/models/responses/response_reasoning_item.rbs index 7efc2a2d..620ee9dc 100644 --- a/sig/openai/models/responses/response_reasoning_item.rbs +++ b/sig/openai/models/responses/response_reasoning_item.rbs @@ -7,7 +7,7 @@ module OpenAI summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], type: :reasoning, encrypted_content: String?, - status: OpenAI::Responses::ResponseReasoningItem::status + status: OpenAI::Models::Responses::ResponseReasoningItem::status } class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel @@ -19,20 +19,28 @@ module OpenAI attr_accessor encrypted_content: String? - attr_reader status: OpenAI::Responses::ResponseReasoningItem::status? + attr_reader status: OpenAI::Models::Responses::ResponseReasoningItem::status? 
def status=: ( - OpenAI::Responses::ResponseReasoningItem::status - ) -> OpenAI::Responses::ResponseReasoningItem::status + OpenAI::Models::Responses::ResponseReasoningItem::status + ) -> OpenAI::Models::Responses::ResponseReasoningItem::status def initialize: ( id: String, summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], ?encrypted_content: String?, - ?status: OpenAI::Responses::ResponseReasoningItem::status, + ?status: OpenAI::Models::Responses::ResponseReasoningItem::status, ?type: :reasoning ) -> void + def to_hash: -> { + id: String, + summary: ::Array[OpenAI::Responses::ResponseReasoningItem::Summary], + type: :reasoning, + encrypted_content: String?, + status: OpenAI::Models::Responses::ResponseReasoningItem::status + } + type summary = { text: String, type: :summary_text } class Summary < OpenAI::Internal::Type::BaseModel @@ -41,6 +49,8 @@ module OpenAI attr_accessor type: :summary_text def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> { text: String, type: :summary_text } end type status = :in_progress | :completed | :incomplete @@ -52,7 +62,7 @@ module OpenAI COMPLETED: :completed INCOMPLETE: :incomplete - def self?.values: -> ::Array[OpenAI::Responses::ResponseReasoningItem::status] + def self?.values: -> ::Array[OpenAI::Models::Responses::ResponseReasoningItem::status] end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs new file mode 100644 index 00000000..4e613606 --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_delta_event.rbs @@ -0,0 +1,47 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_delta_event = + { + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary.delta" + } + + class ResponseReasoningSummaryDeltaEvent < 
OpenAI::Internal::Type::BaseModel + attr_accessor delta: top + + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor summary_index: Integer + + attr_accessor type: :"response.reasoning_summary.delta" + + def initialize: ( + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + ?type: :"response.reasoning_summary.delta" + ) -> void + + def to_hash: -> { + delta: top, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary.delta" + } + end + end + end +end diff --git a/sig/openai/models/responses/response_reasoning_summary_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs new file mode 100644 index 00000000..cb56e84f --- /dev/null +++ b/sig/openai/models/responses/response_reasoning_summary_done_event.rbs @@ -0,0 +1,47 @@ +module OpenAI + module Models + module Responses + type response_reasoning_summary_done_event = + { + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + type: :"response.reasoning_summary.done" + } + + class ResponseReasoningSummaryDoneEvent < OpenAI::Internal::Type::BaseModel + attr_accessor item_id: String + + attr_accessor output_index: Integer + + attr_accessor sequence_number: Integer + + attr_accessor summary_index: Integer + + attr_accessor text: String + + attr_accessor type: :"response.reasoning_summary.done" + + def initialize: ( + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + ?type: :"response.reasoning_summary.done" + ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + type: :"response.reasoning_summary.done" + } + end + end + end +end diff --git 
a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs index 05c6f71d..597bae6f 100644 --- a/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_part_added_event.rbs @@ -6,6 +6,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, summary_index: Integer, type: :"response.reasoning_summary_part.added" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor type: :"response.reasoning_summary_part.added" @@ -25,10 +28,20 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, summary_index: Integer, ?type: :"response.reasoning_summary_part.added" ) -> void + def to_hash: -> { + item_id: String, + output_index: Integer, + part: OpenAI::Responses::ResponseReasoningSummaryPartAddedEvent::Part, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary_part.added" + } + type part = { text: String, type: :summary_text } class Part < OpenAI::Internal::Type::BaseModel @@ -37,6 +50,8 @@ module OpenAI attr_accessor type: :summary_text def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> { text: String, type: :summary_text } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs index c03f6cf4..79c25aa5 100644 --- a/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs +++ 
b/sig/openai/models/responses/response_reasoning_summary_part_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, summary_index: Integer, type: :"response.reasoning_summary_part.done" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor type: :"response.reasoning_summary_part.done" @@ -25,10 +28,20 @@ module OpenAI item_id: String, output_index: Integer, part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, summary_index: Integer, ?type: :"response.reasoning_summary_part.done" ) -> void + def to_hash: -> { + item_id: String, + output_index: Integer, + part: OpenAI::Responses::ResponseReasoningSummaryPartDoneEvent::Part, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary_part.done" + } + type part = { text: String, type: :summary_text } class Part < OpenAI::Internal::Type::BaseModel @@ -37,6 +50,8 @@ module OpenAI attr_accessor type: :summary_text def initialize: (text: String, ?type: :summary_text) -> void + + def to_hash: -> { text: String, type: :summary_text } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs index 494fa8c3..8d39bef7 100644 --- a/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_text_delta_event.rbs @@ -6,6 +6,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, type: :"response.reasoning_summary_text.delta" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor 
sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor type: :"response.reasoning_summary_text.delta" @@ -25,9 +28,19 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, ?type: :"response.reasoning_summary_text.delta" ) -> void + + def to_hash: -> { + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + type: :"response.reasoning_summary_text.delta" + } end end end diff --git a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs index 36bb9006..50cffece 100644 --- a/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs +++ b/sig/openai/models/responses/response_reasoning_summary_text_done_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, type: :"response.reasoning_summary_text.done" @@ -15,6 +16,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor summary_index: Integer attr_accessor text: String @@ -24,10 +27,20 @@ module OpenAI def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, summary_index: Integer, text: String, ?type: :"response.reasoning_summary_text.done" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + summary_index: Integer, + text: String, + type: :"response.reasoning_summary_text.done" + } end end end diff --git a/sig/openai/models/responses/response_refusal_delta_event.rbs b/sig/openai/models/responses/response_refusal_delta_event.rbs index 2dc7df34..4985d6bd 100644 --- a/sig/openai/models/responses/response_refusal_delta_event.rbs +++ b/sig/openai/models/responses/response_refusal_delta_event.rbs @@ -7,6 +7,7 @@ module OpenAI delta: 
String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.refusal.delta" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.refusal.delta" def initialize: ( @@ -26,8 +29,18 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.refusal.delta" ) -> void + + def to_hash: -> { + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.refusal.delta" + } end end end diff --git a/sig/openai/models/responses/response_refusal_done_event.rbs b/sig/openai/models/responses/response_refusal_done_event.rbs index 852fbb7b..ce78512b 100644 --- a/sig/openai/models/responses/response_refusal_done_event.rbs +++ b/sig/openai/models/responses/response_refusal_done_event.rbs @@ -7,6 +7,7 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, + sequence_number: Integer, type: :"response.refusal.done" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor refusal: String + attr_accessor sequence_number: Integer + attr_accessor type: :"response.refusal.done" def initialize: ( @@ -26,8 +29,18 @@ module OpenAI item_id: String, output_index: Integer, refusal: String, + sequence_number: Integer, ?type: :"response.refusal.done" ) -> void + + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + refusal: String, + sequence_number: Integer, + type: :"response.refusal.done" + } end end end diff --git a/sig/openai/models/responses/response_retrieve_params.rbs b/sig/openai/models/responses/response_retrieve_params.rbs index 2a8ef689..c90b79c2 100644 --- a/sig/openai/models/responses/response_retrieve_params.rbs +++ b/sig/openai/models/responses/response_retrieve_params.rbs @@ -19,6 +19,11 @@ module OpenAI ?include: ::Array[OpenAI::Models::Responses::response_includable], ?request_options: 
OpenAI::request_opts ) -> void + + def to_hash: -> { + include: ::Array[OpenAI::Models::Responses::response_includable], + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/responses/response_status.rbs b/sig/openai/models/responses/response_status.rbs index d78fb3a2..e54cd85f 100644 --- a/sig/openai/models/responses/response_status.rbs +++ b/sig/openai/models/responses/response_status.rbs @@ -1,7 +1,8 @@ module OpenAI module Models module Responses - type response_status = :completed | :failed | :in_progress | :incomplete + type response_status = + :completed | :failed | :in_progress | :cancelled | :queued | :incomplete module ResponseStatus extend OpenAI::Internal::Type::Enum @@ -9,6 +10,8 @@ module OpenAI COMPLETED: :completed FAILED: :failed IN_PROGRESS: :in_progress + CANCELLED: :cancelled + QUEUED: :queued INCOMPLETE: :incomplete def self?.values: -> ::Array[OpenAI::Models::Responses::response_status] diff --git a/sig/openai/models/responses/response_stream_event.rbs b/sig/openai/models/responses/response_stream_event.rbs index b8f72810..21511833 100644 --- a/sig/openai/models/responses/response_stream_event.rbs +++ b/sig/openai/models/responses/response_stream_event.rbs @@ -32,12 +32,29 @@ module OpenAI | OpenAI::Responses::ResponseReasoningSummaryTextDoneEvent | OpenAI::Responses::ResponseRefusalDeltaEvent | OpenAI::Responses::ResponseRefusalDoneEvent - | OpenAI::Responses::ResponseTextAnnotationDeltaEvent | OpenAI::Responses::ResponseTextDeltaEvent | OpenAI::Responses::ResponseTextDoneEvent | OpenAI::Responses::ResponseWebSearchCallCompletedEvent | OpenAI::Responses::ResponseWebSearchCallInProgressEvent | OpenAI::Responses::ResponseWebSearchCallSearchingEvent + | OpenAI::Responses::ResponseImageGenCallCompletedEvent + | OpenAI::Responses::ResponseImageGenCallGeneratingEvent + | OpenAI::Responses::ResponseImageGenCallInProgressEvent + | OpenAI::Responses::ResponseImageGenCallPartialImageEvent + | 
OpenAI::Responses::ResponseMcpCallArgumentsDeltaEvent + | OpenAI::Responses::ResponseMcpCallArgumentsDoneEvent + | OpenAI::Responses::ResponseMcpCallCompletedEvent + | OpenAI::Responses::ResponseMcpCallFailedEvent + | OpenAI::Responses::ResponseMcpCallInProgressEvent + | OpenAI::Responses::ResponseMcpListToolsCompletedEvent + | OpenAI::Responses::ResponseMcpListToolsFailedEvent + | OpenAI::Responses::ResponseMcpListToolsInProgressEvent + | OpenAI::Responses::ResponseOutputTextAnnotationAddedEvent + | OpenAI::Responses::ResponseQueuedEvent + | OpenAI::Responses::ResponseReasoningDeltaEvent + | OpenAI::Responses::ResponseReasoningDoneEvent + | OpenAI::Responses::ResponseReasoningSummaryDeltaEvent + | OpenAI::Responses::ResponseReasoningSummaryDoneEvent module ResponseStreamEvent extend OpenAI::Internal::Type::Union diff --git a/sig/openai/models/responses/response_text_annotation_delta_event.rbs b/sig/openai/models/responses/response_text_annotation_delta_event.rbs deleted file mode 100644 index 36d79ace..00000000 --- a/sig/openai/models/responses/response_text_annotation_delta_event.rbs +++ /dev/null @@ -1,111 +0,0 @@ -module OpenAI - module Models - module Responses - type response_text_annotation_delta_event = - { - annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation, - annotation_index: Integer, - content_index: Integer, - item_id: String, - output_index: Integer, - type: :"response.output_text.annotation.added" - } - - class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel - attr_accessor annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation - - attr_accessor annotation_index: Integer - - attr_accessor content_index: Integer - - attr_accessor item_id: String - - attr_accessor output_index: Integer - - attr_accessor type: :"response.output_text.annotation.added" - - def initialize: ( - annotation: OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation, - annotation_index: Integer, - 
content_index: Integer, - item_id: String, - output_index: Integer, - ?type: :"response.output_text.annotation.added" - ) -> void - - type annotation = - OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation - | OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation - | OpenAI::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath - - module Annotation - extend OpenAI::Internal::Type::Union - - type file_citation = - { file_id: String, index: Integer, type: :file_citation } - - class FileCitation < OpenAI::Internal::Type::BaseModel - attr_accessor file_id: String - - attr_accessor index: Integer - - attr_accessor type: :file_citation - - def initialize: ( - file_id: String, - index: Integer, - ?type: :file_citation - ) -> void - end - - type url_citation = - { - end_index: Integer, - start_index: Integer, - title: String, - type: :url_citation, - url: String - } - - class URLCitation < OpenAI::Internal::Type::BaseModel - attr_accessor end_index: Integer - - attr_accessor start_index: Integer - - attr_accessor title: String - - attr_accessor type: :url_citation - - attr_accessor url: String - - def initialize: ( - end_index: Integer, - start_index: Integer, - title: String, - url: String, - ?type: :url_citation - ) -> void - end - - type file_path = { file_id: String, index: Integer, type: :file_path } - - class FilePath < OpenAI::Internal::Type::BaseModel - attr_accessor file_id: String - - attr_accessor index: Integer - - attr_accessor type: :file_path - - def initialize: ( - file_id: String, - index: Integer, - ?type: :file_path - ) -> void - end - - def self?.variants: -> ::Array[OpenAI::Responses::ResponseTextAnnotationDeltaEvent::annotation] - end - end - end - end -end diff --git a/sig/openai/models/responses/response_text_config.rbs b/sig/openai/models/responses/response_text_config.rbs index 816d5b67..e60dae0f 100644 --- a/sig/openai/models/responses/response_text_config.rbs +++ 
b/sig/openai/models/responses/response_text_config.rbs @@ -14,6 +14,10 @@ module OpenAI def initialize: ( ?format_: OpenAI::Models::Responses::response_format_text_config ) -> void + + def to_hash: -> { + format_: OpenAI::Models::Responses::response_format_text_config + } end end end diff --git a/sig/openai/models/responses/response_text_delta_event.rbs b/sig/openai/models/responses/response_text_delta_event.rbs index c5025499..bbea6304 100644 --- a/sig/openai/models/responses/response_text_delta_event.rbs +++ b/sig/openai/models/responses/response_text_delta_event.rbs @@ -7,6 +7,7 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.output_text.delta" } @@ -19,6 +20,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.output_text.delta" def initialize: ( @@ -26,8 +29,18 @@ module OpenAI delta: String, item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.output_text.delta" ) -> void + + def to_hash: -> { + content_index: Integer, + delta: String, + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.output_text.delta" + } end end end diff --git a/sig/openai/models/responses/response_text_done_event.rbs b/sig/openai/models/responses/response_text_done_event.rbs index 0585e9f1..742fb5e3 100644 --- a/sig/openai/models/responses/response_text_done_event.rbs +++ b/sig/openai/models/responses/response_text_done_event.rbs @@ -6,6 +6,7 @@ module OpenAI content_index: Integer, item_id: String, output_index: Integer, + sequence_number: Integer, text: String, type: :"response.output_text.done" } @@ -17,6 +18,8 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor text: String attr_accessor type: :"response.output_text.done" @@ -25,9 +28,19 @@ module OpenAI content_index: Integer, item_id: String, output_index: 
Integer, + sequence_number: Integer, text: String, ?type: :"response.output_text.done" ) -> void + + def to_hash: -> { + content_index: Integer, + item_id: String, + output_index: Integer, + sequence_number: Integer, + text: String, + type: :"response.output_text.done" + } end end end diff --git a/sig/openai/models/responses/response_usage.rbs b/sig/openai/models/responses/response_usage.rbs index a5b8cbc5..2245372b 100644 --- a/sig/openai/models/responses/response_usage.rbs +++ b/sig/openai/models/responses/response_usage.rbs @@ -29,12 +29,22 @@ module OpenAI total_tokens: Integer ) -> void + def to_hash: -> { + input_tokens: Integer, + input_tokens_details: OpenAI::Responses::ResponseUsage::InputTokensDetails, + output_tokens: Integer, + output_tokens_details: OpenAI::Responses::ResponseUsage::OutputTokensDetails, + total_tokens: Integer + } + type input_tokens_details = { cached_tokens: Integer } class InputTokensDetails < OpenAI::Internal::Type::BaseModel attr_accessor cached_tokens: Integer def initialize: (cached_tokens: Integer) -> void + + def to_hash: -> { cached_tokens: Integer } end type output_tokens_details = { reasoning_tokens: Integer } @@ -43,6 +53,8 @@ module OpenAI attr_accessor reasoning_tokens: Integer def initialize: (reasoning_tokens: Integer) -> void + + def to_hash: -> { reasoning_tokens: Integer } end end end diff --git a/sig/openai/models/responses/response_web_search_call_completed_event.rbs b/sig/openai/models/responses/response_web_search_call_completed_event.rbs index 852a046e..66882df9 100644 --- a/sig/openai/models/responses/response_web_search_call_completed_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_completed_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.completed" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: 
:"response.web_search_call.completed" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.web_search_call.completed" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.web_search_call.completed" + } end end end diff --git a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs index 996acf2e..b2928e82 100644 --- a/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_in_progress_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.in_progress" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.web_search_call.in_progress" def initialize: ( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.web_search_call.in_progress" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.web_search_call.in_progress" + } end end end diff --git a/sig/openai/models/responses/response_web_search_call_searching_event.rbs b/sig/openai/models/responses/response_web_search_call_searching_event.rbs index b77bc5aa..4c3a659f 100644 --- a/sig/openai/models/responses/response_web_search_call_searching_event.rbs +++ b/sig/openai/models/responses/response_web_search_call_searching_event.rbs @@ -5,6 +5,7 @@ module OpenAI { item_id: String, output_index: Integer, + sequence_number: Integer, type: :"response.web_search_call.searching" } @@ -13,13 +14,23 @@ module OpenAI attr_accessor output_index: Integer + attr_accessor sequence_number: Integer + attr_accessor type: :"response.web_search_call.searching" def initialize: 
( item_id: String, output_index: Integer, + sequence_number: Integer, ?type: :"response.web_search_call.searching" ) -> void + + def to_hash: -> { + item_id: String, + output_index: Integer, + sequence_number: Integer, + type: :"response.web_search_call.searching" + } end end end diff --git a/sig/openai/models/responses/tool.rbs b/sig/openai/models/responses/tool.rbs index 81616de5..0655d5c6 100644 --- a/sig/openai/models/responses/tool.rbs +++ b/sig/openai/models/responses/tool.rbs @@ -2,14 +2,397 @@ module OpenAI module Models module Responses type tool = - OpenAI::Responses::FileSearchTool - | OpenAI::Responses::FunctionTool + OpenAI::Responses::FunctionTool + | OpenAI::Responses::FileSearchTool | OpenAI::Responses::ComputerTool + | OpenAI::Responses::Tool::Mcp + | OpenAI::Responses::Tool::CodeInterpreter + | OpenAI::Responses::Tool::ImageGeneration + | OpenAI::Responses::Tool::LocalShell | OpenAI::Responses::WebSearchTool module Tool extend OpenAI::Internal::Type::Union + type mcp = + { + server_label: String, + server_url: String, + type: :mcp, + allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, + headers: ::Hash[Symbol, String]?, + require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? + } + + class Mcp < OpenAI::Internal::Type::BaseModel + attr_accessor server_label: String + + attr_accessor server_url: String + + attr_accessor type: :mcp + + attr_accessor allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools? + + attr_accessor headers: ::Hash[Symbol, String]? + + attr_accessor require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? 
+ + def initialize: ( + server_label: String, + server_url: String, + ?allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, + ?headers: ::Hash[Symbol, String]?, + ?require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval?, + ?type: :mcp + ) -> void + + def to_hash: -> { + server_label: String, + server_url: String, + type: :mcp, + allowed_tools: OpenAI::Models::Responses::Tool::Mcp::allowed_tools?, + headers: ::Hash[Symbol, String]?, + require_approval: OpenAI::Models::Responses::Tool::Mcp::require_approval? + } + + type allowed_tools = + ::Array[String] + | OpenAI::Responses::Tool::Mcp::AllowedTools::McpAllowedToolsFilter + + module AllowedTools + extend OpenAI::Internal::Type::Union + + type mcp_allowed_tools_filter = { tool_names: ::Array[String] } + + class McpAllowedToolsFilter < OpenAI::Internal::Type::BaseModel + attr_reader tool_names: ::Array[String]? + + def tool_names=: (::Array[String]) -> ::Array[String] + + def initialize: (?tool_names: ::Array[String]) -> void + + def to_hash: -> { tool_names: ::Array[String] } + end + + def self?.variants: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::allowed_tools] + + StringArray: OpenAI::Internal::Type::Converter + end + + type require_approval = + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter + | OpenAI::Models::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting + + module RequireApproval + extend OpenAI::Internal::Type::Union + + type mcp_tool_approval_filter = + { + always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + } + + class McpToolApprovalFilter < OpenAI::Internal::Type::BaseModel + attr_reader always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always? 
+ + def always=: ( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always + ) -> OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always + + attr_reader never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never? + + def never=: ( + OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + ) -> OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + + def initialize: ( + ?always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + ?never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + ) -> void + + def to_hash: -> { + always: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Always, + never: OpenAI::Responses::Tool::Mcp::RequireApproval::McpToolApprovalFilter::Never + } + + type always = { tool_names: ::Array[String] } + + class Always < OpenAI::Internal::Type::BaseModel + attr_reader tool_names: ::Array[String]? + + def tool_names=: (::Array[String]) -> ::Array[String] + + def initialize: (?tool_names: ::Array[String]) -> void + + def to_hash: -> { tool_names: ::Array[String] } + end + + type never = { tool_names: ::Array[String] } + + class Never < OpenAI::Internal::Type::BaseModel + attr_reader tool_names: ::Array[String]? 
+ + def tool_names=: (::Array[String]) -> ::Array[String] + + def initialize: (?tool_names: ::Array[String]) -> void + + def to_hash: -> { tool_names: ::Array[String] } + end + end + + type mcp_tool_approval_setting = :always | :never + + module McpToolApprovalSetting + extend OpenAI::Internal::Type::Enum + + ALWAYS: :always + NEVER: :never + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::RequireApproval::mcp_tool_approval_setting] + end + + def self?.variants: -> ::Array[OpenAI::Models::Responses::Tool::Mcp::require_approval] + end + end + + type code_interpreter = + { + container: OpenAI::Models::Responses::Tool::CodeInterpreter::container, + type: :code_interpreter + } + + class CodeInterpreter < OpenAI::Internal::Type::BaseModel + attr_accessor container: OpenAI::Models::Responses::Tool::CodeInterpreter::container + + attr_accessor type: :code_interpreter + + def initialize: ( + container: OpenAI::Models::Responses::Tool::CodeInterpreter::container, + ?type: :code_interpreter + ) -> void + + def to_hash: -> { + container: OpenAI::Models::Responses::Tool::CodeInterpreter::container, + type: :code_interpreter + } + + type container = + String + | OpenAI::Responses::Tool::CodeInterpreter::Container::CodeInterpreterToolAuto + + module Container + extend OpenAI::Internal::Type::Union + + type code_interpreter_tool_auto = + { type: :auto, file_ids: ::Array[String] } + + class CodeInterpreterToolAuto < OpenAI::Internal::Type::BaseModel + attr_accessor type: :auto + + attr_reader file_ids: ::Array[String]? 
+ + def file_ids=: (::Array[String]) -> ::Array[String] + + def initialize: (?file_ids: ::Array[String], ?type: :auto) -> void + + def to_hash: -> { type: :auto, file_ids: ::Array[String] } + end + + def self?.variants: -> ::Array[OpenAI::Models::Responses::Tool::CodeInterpreter::container] + end + end + + type image_generation = + { + type: :image_generation, + background: OpenAI::Models::Responses::Tool::ImageGeneration::background, + input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + model: OpenAI::Models::Responses::Tool::ImageGeneration::model, + moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, + output_compression: Integer, + output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format, + partial_images: Integer, + quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality, + size: OpenAI::Models::Responses::Tool::ImageGeneration::size + } + + class ImageGeneration < OpenAI::Internal::Type::BaseModel + attr_accessor type: :image_generation + + attr_reader background: OpenAI::Models::Responses::Tool::ImageGeneration::background? + + def background=: ( + OpenAI::Models::Responses::Tool::ImageGeneration::background + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::background + + attr_reader input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask? + + def input_image_mask=: ( + OpenAI::Responses::Tool::ImageGeneration::InputImageMask + ) -> OpenAI::Responses::Tool::ImageGeneration::InputImageMask + + attr_reader model: OpenAI::Models::Responses::Tool::ImageGeneration::model? + + def model=: ( + OpenAI::Models::Responses::Tool::ImageGeneration::model + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::model + + attr_reader moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation? 
+ + def moderation=: ( + OpenAI::Models::Responses::Tool::ImageGeneration::moderation + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::moderation + + attr_reader output_compression: Integer? + + def output_compression=: (Integer) -> Integer + + attr_reader output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format? + + def output_format=: ( + OpenAI::Models::Responses::Tool::ImageGeneration::output_format + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::output_format + + attr_reader partial_images: Integer? + + def partial_images=: (Integer) -> Integer + + attr_reader quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality? + + def quality=: ( + OpenAI::Models::Responses::Tool::ImageGeneration::quality + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::quality + + attr_reader size: OpenAI::Models::Responses::Tool::ImageGeneration::size? + + def size=: ( + OpenAI::Models::Responses::Tool::ImageGeneration::size + ) -> OpenAI::Models::Responses::Tool::ImageGeneration::size + + def initialize: ( + ?background: OpenAI::Models::Responses::Tool::ImageGeneration::background, + ?input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + ?model: OpenAI::Models::Responses::Tool::ImageGeneration::model, + ?moderation: OpenAI::Models::Responses::Tool::ImageGeneration::moderation, + ?output_compression: Integer, + ?output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format, + ?partial_images: Integer, + ?quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality, + ?size: OpenAI::Models::Responses::Tool::ImageGeneration::size, + ?type: :image_generation + ) -> void + + def to_hash: -> { + type: :image_generation, + background: OpenAI::Models::Responses::Tool::ImageGeneration::background, + input_image_mask: OpenAI::Responses::Tool::ImageGeneration::InputImageMask, + model: OpenAI::Models::Responses::Tool::ImageGeneration::model, + moderation: 
OpenAI::Models::Responses::Tool::ImageGeneration::moderation, + output_compression: Integer, + output_format: OpenAI::Models::Responses::Tool::ImageGeneration::output_format, + partial_images: Integer, + quality: OpenAI::Models::Responses::Tool::ImageGeneration::quality, + size: OpenAI::Models::Responses::Tool::ImageGeneration::size + } + + type background = :transparent | :opaque | :auto + + module Background + extend OpenAI::Internal::Type::Enum + + TRANSPARENT: :transparent + OPAQUE: :opaque + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::background] + end + + type input_image_mask = { file_id: String, image_url: String } + + class InputImageMask < OpenAI::Internal::Type::BaseModel + attr_reader file_id: String? + + def file_id=: (String) -> String + + attr_reader image_url: String? + + def image_url=: (String) -> String + + def initialize: (?file_id: String, ?image_url: String) -> void + + def to_hash: -> { file_id: String, image_url: String } + end + + type model = :"gpt-image-1" + + module Model + extend OpenAI::Internal::Type::Enum + + GPT_IMAGE_1: :"gpt-image-1" + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::model] + end + + type moderation = :auto | :low + + module Moderation + extend OpenAI::Internal::Type::Enum + + AUTO: :auto + LOW: :low + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::moderation] + end + + type output_format = :png | :webp | :jpeg + + module OutputFormat + extend OpenAI::Internal::Type::Enum + + PNG: :png + WEBP: :webp + JPEG: :jpeg + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::output_format] + end + + type quality = :low | :medium | :high | :auto + + module Quality + extend OpenAI::Internal::Type::Enum + + LOW: :low + MEDIUM: :medium + HIGH: :high + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::quality] + end + + type size = 
:"1024x1024" | :"1024x1536" | :"1536x1024" | :auto + + module Size + extend OpenAI::Internal::Type::Enum + + SIZE_1024X1024: :"1024x1024" + SIZE_1024X1536: :"1024x1536" + SIZE_1536X1024: :"1536x1024" + AUTO: :auto + + def self?.values: -> ::Array[OpenAI::Models::Responses::Tool::ImageGeneration::size] + end + end + + type local_shell = { type: :local_shell } + + class LocalShell < OpenAI::Internal::Type::BaseModel + attr_accessor type: :local_shell + + def initialize: (?type: :local_shell) -> void + + def to_hash: -> { type: :local_shell } + end + def self?.variants: -> ::Array[OpenAI::Models::Responses::tool] end end diff --git a/sig/openai/models/responses/tool_choice_function.rbs b/sig/openai/models/responses/tool_choice_function.rbs index 1aa68ba1..10aa7372 100644 --- a/sig/openai/models/responses/tool_choice_function.rbs +++ b/sig/openai/models/responses/tool_choice_function.rbs @@ -9,6 +9,8 @@ module OpenAI attr_accessor type: :function def initialize: (name: String, ?type: :function) -> void + + def to_hash: -> { name: String, type: :function } end end end diff --git a/sig/openai/models/responses/tool_choice_types.rbs b/sig/openai/models/responses/tool_choice_types.rbs index 71458e63..5b7a418e 100644 --- a/sig/openai/models/responses/tool_choice_types.rbs +++ b/sig/openai/models/responses/tool_choice_types.rbs @@ -2,20 +2,27 @@ module OpenAI module Models module Responses type tool_choice_types = - { type: OpenAI::Responses::ToolChoiceTypes::type_ } + { type: OpenAI::Models::Responses::ToolChoiceTypes::type_ } class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Responses::ToolChoiceTypes::type_ + attr_accessor type: OpenAI::Models::Responses::ToolChoiceTypes::type_ def initialize: ( - type: OpenAI::Responses::ToolChoiceTypes::type_ + type: OpenAI::Models::Responses::ToolChoiceTypes::type_ ) -> void + def to_hash: -> { + type: OpenAI::Models::Responses::ToolChoiceTypes::type_ + } + type type_ = :file_search | 
:web_search_preview | :computer_use_preview | :web_search_preview_2025_03_11 + | :image_generation + | :code_interpreter + | :mcp module Type extend OpenAI::Internal::Type::Enum @@ -24,8 +31,11 @@ module OpenAI WEB_SEARCH_PREVIEW: :web_search_preview COMPUTER_USE_PREVIEW: :computer_use_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 + IMAGE_GENERATION: :image_generation + CODE_INTERPRETER: :code_interpreter + MCP: :mcp - def self?.values: -> ::Array[OpenAI::Responses::ToolChoiceTypes::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::ToolChoiceTypes::type_] end end end diff --git a/sig/openai/models/responses/web_search_tool.rbs b/sig/openai/models/responses/web_search_tool.rbs index dba0454c..3812d3bb 100644 --- a/sig/openai/models/responses/web_search_tool.rbs +++ b/sig/openai/models/responses/web_search_tool.rbs @@ -3,28 +3,34 @@ module OpenAI module Responses type web_search_tool = { - type: OpenAI::Responses::WebSearchTool::type_, - search_context_size: OpenAI::Responses::WebSearchTool::search_context_size, + type: OpenAI::Models::Responses::WebSearchTool::type_, + search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, user_location: OpenAI::Responses::WebSearchTool::UserLocation? } class WebSearchTool < OpenAI::Internal::Type::BaseModel - attr_accessor type: OpenAI::Responses::WebSearchTool::type_ + attr_accessor type: OpenAI::Models::Responses::WebSearchTool::type_ - attr_reader search_context_size: OpenAI::Responses::WebSearchTool::search_context_size? + attr_reader search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size? 
def search_context_size=: ( - OpenAI::Responses::WebSearchTool::search_context_size - ) -> OpenAI::Responses::WebSearchTool::search_context_size + OpenAI::Models::Responses::WebSearchTool::search_context_size + ) -> OpenAI::Models::Responses::WebSearchTool::search_context_size attr_accessor user_location: OpenAI::Responses::WebSearchTool::UserLocation? def initialize: ( - type: OpenAI::Responses::WebSearchTool::type_, - ?search_context_size: OpenAI::Responses::WebSearchTool::search_context_size, + type: OpenAI::Models::Responses::WebSearchTool::type_, + ?search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, ?user_location: OpenAI::Responses::WebSearchTool::UserLocation? ) -> void + def to_hash: -> { + type: OpenAI::Models::Responses::WebSearchTool::type_, + search_context_size: OpenAI::Models::Responses::WebSearchTool::search_context_size, + user_location: OpenAI::Responses::WebSearchTool::UserLocation? + } + type type_ = :web_search_preview | :web_search_preview_2025_03_11 module Type @@ -33,7 +39,7 @@ module OpenAI WEB_SEARCH_PREVIEW: :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11: :web_search_preview_2025_03_11 - def self?.values: -> ::Array[OpenAI::Responses::WebSearchTool::type_] + def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::type_] end type search_context_size = :low | :medium | :high @@ -45,7 +51,7 @@ module OpenAI MEDIUM: :medium HIGH: :high - def self?.values: -> ::Array[OpenAI::Responses::WebSearchTool::search_context_size] + def self?.values: -> ::Array[OpenAI::Models::Responses::WebSearchTool::search_context_size] end type user_location = @@ -75,6 +81,14 @@ module OpenAI ?timezone: String?, ?type: :approximate ) -> void + + def to_hash: -> { + type: :approximate, + city: String?, + country: String?, + region: String?, + timezone: String? 
+ } end end end diff --git a/sig/openai/models/responses_model.rbs b/sig/openai/models/responses_model.rbs index 42a66641..3014c03a 100644 --- a/sig/openai/models/responses_model.rbs +++ b/sig/openai/models/responses_model.rbs @@ -3,7 +3,7 @@ module OpenAI type responses_model = String | OpenAI::Models::chat_model - | OpenAI::ResponsesModel::responses_only_model + | OpenAI::Models::ResponsesModel::responses_only_model module ResponsesModel extend OpenAI::Internal::Type::Union @@ -22,7 +22,7 @@ module OpenAI COMPUTER_USE_PREVIEW: :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11: :"computer-use-preview-2025-03-11" - def self?.values: -> ::Array[OpenAI::ResponsesModel::responses_only_model] + def self?.values: -> ::Array[OpenAI::Models::ResponsesModel::responses_only_model] end def self?.variants: -> ::Array[OpenAI::Models::responses_model] diff --git a/sig/openai/models/static_file_chunking_strategy.rbs b/sig/openai/models/static_file_chunking_strategy.rbs index 0ba93ae4..1d8dee49 100644 --- a/sig/openai/models/static_file_chunking_strategy.rbs +++ b/sig/openai/models/static_file_chunking_strategy.rbs @@ -12,6 +12,11 @@ module OpenAI chunk_overlap_tokens: Integer, max_chunk_size_tokens: Integer ) -> void + + def to_hash: -> { + chunk_overlap_tokens: Integer, + max_chunk_size_tokens: Integer + } end end end diff --git a/sig/openai/models/static_file_chunking_strategy_object.rbs b/sig/openai/models/static_file_chunking_strategy_object.rbs index 6b2d231e..e65aa3db 100644 --- a/sig/openai/models/static_file_chunking_strategy_object.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object.rbs @@ -12,6 +12,11 @@ module OpenAI static: OpenAI::StaticFileChunkingStrategy, ?type: :static ) -> void + + def to_hash: -> { + static: OpenAI::StaticFileChunkingStrategy, + type: :static + } end end end diff --git a/sig/openai/models/static_file_chunking_strategy_object_param.rbs b/sig/openai/models/static_file_chunking_strategy_object_param.rbs index c0f5182c..a722310f 
100644 --- a/sig/openai/models/static_file_chunking_strategy_object_param.rbs +++ b/sig/openai/models/static_file_chunking_strategy_object_param.rbs @@ -12,6 +12,11 @@ module OpenAI static: OpenAI::StaticFileChunkingStrategy, ?type: :static ) -> void + + def to_hash: -> { + static: OpenAI::StaticFileChunkingStrategy, + type: :static + } end end end diff --git a/sig/openai/models/upload.rbs b/sig/openai/models/upload.rbs index 955ba4fe..01762451 100644 --- a/sig/openai/models/upload.rbs +++ b/sig/openai/models/upload.rbs @@ -9,7 +9,7 @@ module OpenAI filename: String, object: :upload, purpose: String, - status: OpenAI::Upload::status, + status: OpenAI::Models::Upload::status, file: OpenAI::FileObject? } @@ -28,7 +28,7 @@ module OpenAI attr_accessor purpose: String - attr_accessor status: OpenAI::Upload::status + attr_accessor status: OpenAI::Models::Upload::status attr_accessor file: OpenAI::FileObject? @@ -39,11 +39,23 @@ module OpenAI expires_at: Integer, filename: String, purpose: String, - status: OpenAI::Upload::status, + status: OpenAI::Models::Upload::status, ?file: OpenAI::FileObject?, ?object: :upload ) -> void + def to_hash: -> { + id: String, + bytes: Integer, + created_at: Integer, + expires_at: Integer, + filename: String, + object: :upload, + purpose: String, + status: OpenAI::Models::Upload::status, + file: OpenAI::FileObject? 
+ } + type status = :pending | :completed | :cancelled | :expired module Status @@ -54,7 +66,7 @@ module OpenAI CANCELLED: :cancelled EXPIRED: :expired - def self?.values: -> ::Array[OpenAI::Upload::status] + def self?.values: -> ::Array[OpenAI::Models::Upload::status] end end end diff --git a/sig/openai/models/upload_cancel_params.rbs b/sig/openai/models/upload_cancel_params.rbs index c5cb5b12..92040ad5 100644 --- a/sig/openai/models/upload_cancel_params.rbs +++ b/sig/openai/models/upload_cancel_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/upload_complete_params.rbs b/sig/openai/models/upload_complete_params.rbs index 4bdf3d87..c223bb70 100644 --- a/sig/openai/models/upload_complete_params.rbs +++ b/sig/openai/models/upload_complete_params.rbs @@ -19,6 +19,12 @@ module OpenAI ?md5: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + part_ids: ::Array[String], + :md5 => String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/upload_create_params.rbs b/sig/openai/models/upload_create_params.rbs index dce638fe..85f45a52 100644 --- a/sig/openai/models/upload_create_params.rbs +++ b/sig/openai/models/upload_create_params.rbs @@ -28,6 +28,14 @@ module OpenAI purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + bytes: Integer, + filename: String, + mime_type: String, + purpose: OpenAI::Models::file_purpose, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/uploads/part_create_params.rbs b/sig/openai/models/uploads/part_create_params.rbs index 7a94c0ae..deeec480 100644 --- a/sig/openai/models/uploads/part_create_params.rbs +++ b/sig/openai/models/uploads/part_create_params.rbs @@ -2,19 +2,24 
@@ module OpenAI module Models module Uploads type part_create_params = - { data: (Pathname | StringIO | IO | OpenAI::FilePart) } + { data: OpenAI::Internal::file_input } & OpenAI::Internal::Type::request_parameters class PartCreateParams < OpenAI::Internal::Type::BaseModel extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor data: Pathname | StringIO | IO | OpenAI::FilePart + attr_accessor data: OpenAI::Internal::file_input def initialize: ( - data: Pathname | StringIO | IO | OpenAI::FilePart, + data: OpenAI::Internal::file_input, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + data: OpenAI::Internal::file_input, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/uploads/upload_part.rbs b/sig/openai/models/uploads/upload_part.rbs index 17613909..60554e85 100644 --- a/sig/openai/models/uploads/upload_part.rbs +++ b/sig/openai/models/uploads/upload_part.rbs @@ -26,6 +26,13 @@ module OpenAI upload_id: String, ?object: :"upload.part" ) -> void + + def to_hash: -> { + id: String, + created_at: Integer, + object: :"upload.part", + upload_id: String + } end end end diff --git a/sig/openai/models/vector_store.rbs b/sig/openai/models/vector_store.rbs index 2fc80760..24d34d54 100644 --- a/sig/openai/models/vector_store.rbs +++ b/sig/openai/models/vector_store.rbs @@ -9,9 +9,9 @@ module OpenAI metadata: OpenAI::Models::metadata?, name: String, object: :vector_store, - status: OpenAI::VectorStore::status, + status: OpenAI::Models::VectorStore::status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStore::ExpiresAfter, expires_at: Integer? 
} @@ -30,15 +30,15 @@ module OpenAI attr_accessor object: :vector_store - attr_accessor status: OpenAI::VectorStore::status + attr_accessor status: OpenAI::Models::VectorStore::status attr_accessor usage_bytes: Integer - attr_reader expires_after: OpenAI::VectorStoreExpirationAfter? + attr_reader expires_after: OpenAI::VectorStore::ExpiresAfter? def expires_after=: ( - OpenAI::VectorStoreExpirationAfter - ) -> OpenAI::VectorStoreExpirationAfter + OpenAI::VectorStore::ExpiresAfter + ) -> OpenAI::VectorStore::ExpiresAfter attr_accessor expires_at: Integer? @@ -49,13 +49,27 @@ module OpenAI last_active_at: Integer?, metadata: OpenAI::Models::metadata?, name: String, - status: OpenAI::VectorStore::status, + status: OpenAI::Models::VectorStore::status, usage_bytes: Integer, - ?expires_after: OpenAI::VectorStoreExpirationAfter, + ?expires_after: OpenAI::VectorStore::ExpiresAfter, ?expires_at: Integer?, ?object: :vector_store ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + file_counts: OpenAI::VectorStore::FileCounts, + last_active_at: Integer?, + metadata: OpenAI::Models::metadata?, + name: String, + object: :vector_store, + status: OpenAI::Models::VectorStore::status, + usage_bytes: Integer, + expires_after: OpenAI::VectorStore::ExpiresAfter, + expires_at: Integer? 
+ } + type file_counts = { cancelled: Integer, @@ -83,6 +97,14 @@ module OpenAI in_progress: Integer, total: Integer ) -> void + + def to_hash: -> { + cancelled: Integer, + completed: Integer, + failed: Integer, + in_progress: Integer, + total: Integer + } end type status = :expired | :in_progress | :completed @@ -94,7 +116,19 @@ module OpenAI IN_PROGRESS: :in_progress COMPLETED: :completed - def self?.values: -> ::Array[OpenAI::VectorStore::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStore::status] + end + + type expires_after = { anchor: :last_active_at, days: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :last_active_at + + attr_accessor days: Integer + + def initialize: (days: Integer, ?anchor: :last_active_at) -> void + + def to_hash: -> { anchor: :last_active_at, days: Integer } end end end diff --git a/sig/openai/models/vector_store_create_params.rbs b/sig/openai/models/vector_store_create_params.rbs index 151f9abf..d5c48eb6 100644 --- a/sig/openai/models/vector_store_create_params.rbs +++ b/sig/openai/models/vector_store_create_params.rbs @@ -3,7 +3,7 @@ module OpenAI type vector_store_create_params = { chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - expires_after: OpenAI::VectorStoreExpirationAfter, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, file_ids: ::Array[String], metadata: OpenAI::Models::metadata?, name: String @@ -20,11 +20,11 @@ module OpenAI OpenAI::Models::file_chunking_strategy_param ) -> OpenAI::Models::file_chunking_strategy_param - attr_reader expires_after: OpenAI::VectorStoreExpirationAfter? + attr_reader expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter? def expires_after=: ( - OpenAI::VectorStoreExpirationAfter - ) -> OpenAI::VectorStoreExpirationAfter + OpenAI::VectorStoreCreateParams::ExpiresAfter + ) -> OpenAI::VectorStoreCreateParams::ExpiresAfter attr_reader file_ids: ::Array[String]? 
@@ -38,12 +38,33 @@ module OpenAI def initialize: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::VectorStoreExpirationAfter, + ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, + file_ids: ::Array[String], + metadata: OpenAI::Models::metadata?, + name: String, + request_options: OpenAI::RequestOptions + } + + type expires_after = { anchor: :last_active_at, days: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :last_active_at + + attr_accessor days: Integer + + def initialize: (days: Integer, ?anchor: :last_active_at) -> void + + def to_hash: -> { anchor: :last_active_at, days: Integer } + end end end end diff --git a/sig/openai/models/vector_store_delete_params.rbs b/sig/openai/models/vector_store_delete_params.rbs index 89da672f..d64b7288 100644 --- a/sig/openai/models/vector_store_delete_params.rbs +++ b/sig/openai/models/vector_store_delete_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/vector_store_deleted.rbs b/sig/openai/models/vector_store_deleted.rbs index 8bc51626..1ca36ab4 100644 --- a/sig/openai/models/vector_store_deleted.rbs +++ b/sig/openai/models/vector_store_deleted.rbs @@ -15,6 +15,12 @@ module OpenAI deleted: bool, ?object: :"vector_store.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"vector_store.deleted" + } end end end diff --git a/sig/openai/models/vector_store_expiration_after.rbs 
b/sig/openai/models/vector_store_expiration_after.rbs deleted file mode 100644 index 3521d748..00000000 --- a/sig/openai/models/vector_store_expiration_after.rbs +++ /dev/null @@ -1,14 +0,0 @@ -module OpenAI - module Models - type vector_store_expiration_after = - { anchor: :last_active_at, days: Integer } - - class VectorStoreExpirationAfter < OpenAI::Internal::Type::BaseModel - attr_accessor anchor: :last_active_at - - attr_accessor days: Integer - - def initialize: (days: Integer, ?anchor: :last_active_at) -> void - end - end -end diff --git a/sig/openai/models/vector_store_list_params.rbs b/sig/openai/models/vector_store_list_params.rbs index ed5b3079..dfbe777d 100644 --- a/sig/openai/models/vector_store_list_params.rbs +++ b/sig/openai/models/vector_store_list_params.rbs @@ -39,6 +39,14 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + limit: Integer, + order: OpenAI::Models::VectorStoreListParams::order, + request_options: OpenAI::RequestOptions + } + type order = :asc | :desc module Order diff --git a/sig/openai/models/vector_store_retrieve_params.rbs b/sig/openai/models/vector_store_retrieve_params.rbs index 6b466528..92c244d4 100644 --- a/sig/openai/models/vector_store_retrieve_params.rbs +++ b/sig/openai/models/vector_store_retrieve_params.rbs @@ -8,6 +8,8 @@ module OpenAI include OpenAI::Internal::Type::RequestParameters def initialize: (?request_options: OpenAI::request_opts) -> void + + def to_hash: -> { request_options: OpenAI::RequestOptions } end end end diff --git a/sig/openai/models/vector_store_search_params.rbs b/sig/openai/models/vector_store_search_params.rbs index 863ea4ab..0ad7493a 100644 --- a/sig/openai/models/vector_store_search_params.rbs +++ b/sig/openai/models/vector_store_search_params.rbs @@ -45,6 +45,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + query: OpenAI::Models::VectorStoreSearchParams::query, + filters: 
OpenAI::Models::VectorStoreSearchParams::filters, + max_num_results: Integer, + ranking_options: OpenAI::VectorStoreSearchParams::RankingOptions, + rewrite_query: bool, + request_options: OpenAI::RequestOptions + } + type query = String | ::Array[String] module Query @@ -65,26 +74,31 @@ module OpenAI type ranking_options = { - ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker, + ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, score_threshold: Float } class RankingOptions < OpenAI::Internal::Type::BaseModel - attr_reader ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker? + attr_reader ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker? def ranker=: ( - OpenAI::VectorStoreSearchParams::RankingOptions::ranker - ) -> OpenAI::VectorStoreSearchParams::RankingOptions::ranker + OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker + ) -> OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker attr_reader score_threshold: Float? 
def score_threshold=: (Float) -> Float def initialize: ( - ?ranker: OpenAI::VectorStoreSearchParams::RankingOptions::ranker, + ?ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, ?score_threshold: Float ) -> void + def to_hash: -> { + ranker: OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker, + score_threshold: Float + } + type ranker = :auto | :"default-2024-11-15" module Ranker @@ -93,7 +107,7 @@ module OpenAI AUTO: :auto DEFAULT_2024_11_15: :"default-2024-11-15" - def self?.values: -> ::Array[OpenAI::VectorStoreSearchParams::RankingOptions::ranker] + def self?.values: -> ::Array[OpenAI::Models::VectorStoreSearchParams::RankingOptions::ranker] end end end diff --git a/sig/openai/models/vector_store_search_response.rbs b/sig/openai/models/vector_store_search_response.rbs index 2c977b03..3ce0a392 100644 --- a/sig/openai/models/vector_store_search_response.rbs +++ b/sig/openai/models/vector_store_search_response.rbs @@ -28,6 +28,14 @@ module OpenAI score: Float ) -> void + def to_hash: -> { + attributes: ::Hash[Symbol, OpenAI::Models::VectorStoreSearchResponse::attribute]?, + content: ::Array[OpenAI::Models::VectorStoreSearchResponse::Content], + file_id: String, + filename: String, + score: Float + } + type attribute = String | Float | bool module Attribute @@ -52,6 +60,11 @@ module OpenAI type: OpenAI::Models::VectorStoreSearchResponse::Content::type_ ) -> void + def to_hash: -> { + text: String, + type: OpenAI::Models::VectorStoreSearchResponse::Content::type_ + } + type type_ = :text module Type diff --git a/sig/openai/models/vector_store_update_params.rbs b/sig/openai/models/vector_store_update_params.rbs index f86edba5..4faf2804 100644 --- a/sig/openai/models/vector_store_update_params.rbs +++ b/sig/openai/models/vector_store_update_params.rbs @@ -2,7 +2,7 @@ module OpenAI module Models type vector_store_update_params = { - expires_after: OpenAI::VectorStoreExpirationAfter?, + expires_after: 
OpenAI::VectorStoreUpdateParams::ExpiresAfter?, metadata: OpenAI::Models::metadata?, name: String? } @@ -12,18 +12,37 @@ module OpenAI extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - attr_accessor expires_after: OpenAI::VectorStoreExpirationAfter? + attr_accessor expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter? attr_accessor metadata: OpenAI::Models::metadata? attr_accessor name: String? def initialize: ( - ?expires_after: OpenAI::VectorStoreExpirationAfter?, + ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, + metadata: OpenAI::Models::metadata?, + name: String?, + request_options: OpenAI::RequestOptions + } + + type expires_after = { anchor: :last_active_at, days: Integer } + + class ExpiresAfter < OpenAI::Internal::Type::BaseModel + attr_accessor anchor: :last_active_at + + attr_accessor days: Integer + + def initialize: (days: Integer, ?anchor: :last_active_at) -> void + + def to_hash: -> { anchor: :last_active_at, days: Integer } + end end end end diff --git a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs index 0ee2d4d8..22b94cc4 100644 --- a/sig/openai/models/vector_stores/file_batch_cancel_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_cancel_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_batch_create_params.rbs b/sig/openai/models/vector_stores/file_batch_create_params.rbs index 3715958c..b9eca2b5 100644 --- 
a/sig/openai/models/vector_stores/file_batch_create_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_batch_create_params = { file_ids: ::Array[String], - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy_param } & OpenAI::Internal::Type::request_parameters @@ -15,7 +15,7 @@ module OpenAI attr_accessor file_ids: ::Array[String] - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? @@ -25,17 +25,24 @@ module OpenAI def initialize: ( file_ids: ::Array[String], - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file_ids: ::Array[String], + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, + chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + request_options: OpenAI::RequestOptions + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::FileBatchCreateParams::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::FileBatchCreateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs index 918e28f0..92bc31fb 100644 --- 
a/sig/openai/models/vector_stores/file_batch_list_files_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_list_files_params.rbs @@ -52,6 +52,16 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + vector_store_id: String, + after: String, + before: String, + filter: OpenAI::Models::VectorStores::FileBatchListFilesParams::filter, + limit: Integer, + order: OpenAI::Models::VectorStores::FileBatchListFilesParams::order, + request_options: OpenAI::RequestOptions + } + type filter = :in_progress | :completed | :failed | :cancelled module Filter diff --git a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs index 1e21e9d1..3f2ee92a 100644 --- a/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_batch_retrieve_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_content_params.rbs b/sig/openai/models/vector_stores/file_content_params.rbs index 5d35fcf1..1797ebf6 100644 --- a/sig/openai/models/vector_stores/file_content_params.rbs +++ b/sig/openai/models/vector_stores/file_content_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_content_response.rbs b/sig/openai/models/vector_stores/file_content_response.rbs index 23306b97..200ec37f 100644 --- a/sig/openai/models/vector_stores/file_content_response.rbs +++ b/sig/openai/models/vector_stores/file_content_response.rbs @@ -13,6 +13,8 @@ module OpenAI def type=: (String) -> String def initialize: (?text: String, 
?type: String) -> void + + def to_hash: -> { text: String, type: String } end end end diff --git a/sig/openai/models/vector_stores/file_create_params.rbs b/sig/openai/models/vector_stores/file_create_params.rbs index 29469ef1..b5505515 100644 --- a/sig/openai/models/vector_stores/file_create_params.rbs +++ b/sig/openai/models/vector_stores/file_create_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_create_params = { file_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy_param } & OpenAI::Internal::Type::request_parameters @@ -15,7 +15,7 @@ module OpenAI attr_accessor file_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy_param? 
@@ -25,17 +25,24 @@ module OpenAI def initialize: ( file_id: String, - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + file_id: String, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, + chunking_strategy: OpenAI::Models::file_chunking_strategy_param, + request_options: OpenAI::RequestOptions + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::FileCreateParams::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::FileCreateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/file_delete_params.rbs b/sig/openai/models/vector_stores/file_delete_params.rbs index 5fc9986b..06e77cba 100644 --- a/sig/openai/models/vector_stores/file_delete_params.rbs +++ b/sig/openai/models/vector_stores/file_delete_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_list_params.rbs b/sig/openai/models/vector_stores/file_list_params.rbs index 623629d4..45e9fc25 100644 --- a/sig/openai/models/vector_stores/file_list_params.rbs +++ b/sig/openai/models/vector_stores/file_list_params.rbs @@ -48,6 +48,15 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + after: String, + before: String, + filter: OpenAI::Models::VectorStores::FileListParams::filter, + limit: Integer, + order: OpenAI::Models::VectorStores::FileListParams::order, + request_options: OpenAI::RequestOptions + } + type filter = 
:in_progress | :completed | :failed | :cancelled module Filter diff --git a/sig/openai/models/vector_stores/file_retrieve_params.rbs b/sig/openai/models/vector_stores/file_retrieve_params.rbs index d46822ed..c0cac542 100644 --- a/sig/openai/models/vector_stores/file_retrieve_params.rbs +++ b/sig/openai/models/vector_stores/file_retrieve_params.rbs @@ -14,6 +14,11 @@ module OpenAI vector_store_id: String, ?request_options: OpenAI::request_opts ) -> void + + def to_hash: -> { + vector_store_id: String, + request_options: OpenAI::RequestOptions + } end end end diff --git a/sig/openai/models/vector_stores/file_update_params.rbs b/sig/openai/models/vector_stores/file_update_params.rbs index 58700418..83729e3e 100644 --- a/sig/openai/models/vector_stores/file_update_params.rbs +++ b/sig/openai/models/vector_stores/file_update_params.rbs @@ -4,7 +4,7 @@ module OpenAI type file_update_params = { vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]? + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]? } & OpenAI::Internal::Type::request_parameters @@ -14,20 +14,26 @@ module OpenAI attr_accessor vector_store_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]? + attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]? 
def initialize: ( vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, ?request_options: OpenAI::request_opts ) -> void + def to_hash: -> { + vector_store_id: String, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, + request_options: OpenAI::RequestOptions + } + type attribute = String | Float | bool module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::FileUpdateParams::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::FileUpdateParams::attribute] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file.rbs b/sig/openai/models/vector_stores/vector_store_file.rbs index 5bbb1310..3c64122d 100644 --- a/sig/openai/models/vector_stores/vector_store_file.rbs +++ b/sig/openai/models/vector_stores/vector_store_file.rbs @@ -9,10 +9,10 @@ module OpenAI created_at: Integer, last_error: OpenAI::VectorStores::VectorStoreFile::LastError?, object: :"vector_store.file", - status: OpenAI::VectorStores::VectorStoreFile::status, + status: OpenAI::Models::VectorStores::VectorStoreFile::status, usage_bytes: Integer, vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, chunking_strategy: OpenAI::Models::file_chunking_strategy } @@ -25,13 +25,13 @@ module OpenAI attr_accessor object: :"vector_store.file" - attr_accessor status: OpenAI::VectorStores::VectorStoreFile::status + attr_accessor status: OpenAI::Models::VectorStores::VectorStoreFile::status attr_accessor usage_bytes: Integer attr_accessor vector_store_id: String - attr_accessor attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]? 
+ attr_accessor attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]? attr_reader chunking_strategy: OpenAI::Models::file_chunking_strategy? @@ -43,30 +43,47 @@ module OpenAI id: String, created_at: Integer, last_error: OpenAI::VectorStores::VectorStoreFile::LastError?, - status: OpenAI::VectorStores::VectorStoreFile::status, + status: OpenAI::Models::VectorStores::VectorStoreFile::status, usage_bytes: Integer, vector_store_id: String, - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::VectorStoreFile::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy, ?object: :"vector_store.file" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + last_error: OpenAI::VectorStores::VectorStoreFile::LastError?, + object: :"vector_store.file", + status: OpenAI::Models::VectorStores::VectorStoreFile::status, + usage_bytes: Integer, + vector_store_id: String, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::VectorStoreFile::attribute]?, + chunking_strategy: OpenAI::Models::file_chunking_strategy + } + type last_error = { - code: OpenAI::VectorStores::VectorStoreFile::LastError::code, + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code, message: String } class LastError < OpenAI::Internal::Type::BaseModel - attr_accessor code: OpenAI::VectorStores::VectorStoreFile::LastError::code + attr_accessor code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code attr_accessor message: String def initialize: ( - code: OpenAI::VectorStores::VectorStoreFile::LastError::code, + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code, message: String ) -> void + def to_hash: -> { + code: OpenAI::Models::VectorStores::VectorStoreFile::LastError::code, + message: String + } + type code = :server_error | :unsupported_file | :invalid_file module Code @@ -76,7 +93,7 @@ module OpenAI 
UNSUPPORTED_FILE: :unsupported_file INVALID_FILE: :invalid_file - def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFile::LastError::code] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::LastError::code] end end @@ -90,7 +107,7 @@ module OpenAI CANCELLED: :cancelled FAILED: :failed - def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFile::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::status] end type attribute = String | Float | bool @@ -98,7 +115,7 @@ module OpenAI module Attribute extend OpenAI::Internal::Type::Union - def self?.variants: -> ::Array[OpenAI::VectorStores::VectorStoreFile::attribute] + def self?.variants: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFile::attribute] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file_batch.rbs b/sig/openai/models/vector_stores/vector_store_file_batch.rbs index b21e96bd..6ad78bb0 100644 --- a/sig/openai/models/vector_stores/vector_store_file_batch.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_batch.rbs @@ -9,7 +9,7 @@ module OpenAI created_at: Integer, file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, object: :"vector_store.files_batch", - status: OpenAI::VectorStores::VectorStoreFileBatch::status, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status, vector_store_id: String } @@ -22,7 +22,7 @@ module OpenAI attr_accessor object: :"vector_store.files_batch" - attr_accessor status: OpenAI::VectorStores::VectorStoreFileBatch::status + attr_accessor status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status attr_accessor vector_store_id: String @@ -30,11 +30,20 @@ module OpenAI id: String, created_at: Integer, file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, - status: OpenAI::VectorStores::VectorStoreFileBatch::status, + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status, vector_store_id: String, ?object: 
:"vector_store.files_batch" ) -> void + def to_hash: -> { + id: String, + created_at: Integer, + file_counts: OpenAI::VectorStores::VectorStoreFileBatch::FileCounts, + object: :"vector_store.files_batch", + status: OpenAI::Models::VectorStores::VectorStoreFileBatch::status, + vector_store_id: String + } + type file_counts = { cancelled: Integer, @@ -62,6 +71,14 @@ module OpenAI in_progress: Integer, total: Integer ) -> void + + def to_hash: -> { + cancelled: Integer, + completed: Integer, + failed: Integer, + in_progress: Integer, + total: Integer + } end type status = :in_progress | :completed | :cancelled | :failed @@ -74,7 +91,7 @@ module OpenAI CANCELLED: :cancelled FAILED: :failed - def self?.values: -> ::Array[OpenAI::VectorStores::VectorStoreFileBatch::status] + def self?.values: -> ::Array[OpenAI::Models::VectorStores::VectorStoreFileBatch::status] end end end diff --git a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs index d9d9038a..235b13ae 100644 --- a/sig/openai/models/vector_stores/vector_store_file_deleted.rbs +++ b/sig/openai/models/vector_stores/vector_store_file_deleted.rbs @@ -18,6 +18,12 @@ module OpenAI deleted: bool, ?object: :"vector_store.file.deleted" ) -> void + + def to_hash: -> { + id: String, + deleted: bool, + object: :"vector_store.file.deleted" + } end end end diff --git a/sig/openai/resources/audio/transcriptions.rbs b/sig/openai/resources/audio/transcriptions.rbs index 0d72b943..0130f147 100644 --- a/sig/openai/resources/audio/transcriptions.rbs +++ b/sig/openai/resources/audio/transcriptions.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Transcriptions def create: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: 
::Array[OpenAI::Models::Audio::transcription_include], @@ -11,12 +11,12 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> OpenAI::Models::Audio::transcription_create_response def create_streaming: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranscriptionCreateParams::model, ?chunking_strategy: OpenAI::Models::Audio::TranscriptionCreateParams::chunking_strategy?, ?include: ::Array[OpenAI::Models::Audio::transcription_include], @@ -24,7 +24,7 @@ module OpenAI ?prompt: String, ?response_format: OpenAI::Models::audio_response_format, ?temperature: Float, - ?timestamp_granularities: ::Array[OpenAI::Audio::TranscriptionCreateParams::timestamp_granularity], + ?timestamp_granularities: ::Array[OpenAI::Models::Audio::TranscriptionCreateParams::timestamp_granularity], ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Audio::transcription_stream_event] diff --git a/sig/openai/resources/audio/translations.rbs b/sig/openai/resources/audio/translations.rbs index a2cece60..bd3560b7 100644 --- a/sig/openai/resources/audio/translations.rbs +++ b/sig/openai/resources/audio/translations.rbs @@ -3,7 +3,7 @@ module OpenAI class Audio class Translations def create: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, model: OpenAI::Models::Audio::TranslationCreateParams::model, ?prompt: String, ?response_format: OpenAI::Models::Audio::TranslationCreateParams::response_format, diff --git a/sig/openai/resources/beta/threads.rbs b/sig/openai/resources/beta/threads.rbs index fe80a355..27b8eeaa 100644 --- 
a/sig/openai/resources/beta/threads.rbs +++ b/sig/openai/resources/beta/threads.rbs @@ -45,7 +45,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Beta::Threads::Run @@ -64,7 +64,7 @@ module OpenAI ?tool_resources: OpenAI::Beta::ThreadCreateAndRunParams::ToolResources?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::ThreadCreateAndRunParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git a/sig/openai/resources/beta/threads/runs.rbs b/sig/openai/resources/beta/threads/runs.rbs index 8ca38f5d..38743701 100644 --- a/sig/openai/resources/beta/threads/runs.rbs +++ b/sig/openai/resources/beta/threads/runs.rbs @@ -23,7 +23,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Beta::Threads::Run @@ -45,7 +45,7 @@ module OpenAI ?tool_choice: OpenAI::Models::Beta::assistant_tool_choice_option?, ?tools: ::Array[OpenAI::Models::Beta::assistant_tool]?, ?top_p: Float?, - ?truncation_strategy: OpenAI::Beta::TruncationObject?, + ?truncation_strategy: OpenAI::Beta::Threads::RunCreateParams::TruncationStrategy?, ?request_options: OpenAI::request_opts ) -> OpenAI::Internal::Stream[OpenAI::Models::Beta::assistant_stream_event] diff --git 
a/sig/openai/resources/chat/completions.rbs b/sig/openai/resources/chat/completions.rbs index 3eb309a0..5e457166 100644 --- a/sig/openai/resources/chat/completions.rbs +++ b/sig/openai/resources/chat/completions.rbs @@ -16,7 +16,7 @@ module OpenAI ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, @@ -50,7 +50,7 @@ module OpenAI ?max_completion_tokens: Integer?, ?max_tokens: Integer?, ?metadata: OpenAI::Models::metadata?, - ?modalities: ::Array[OpenAI::Chat::CompletionCreateParams::modality]?, + ?modalities: ::Array[OpenAI::Models::Chat::CompletionCreateParams::modality]?, ?n: Integer?, ?parallel_tool_calls: bool, ?prediction: OpenAI::Chat::ChatCompletionPredictionContent?, diff --git a/sig/openai/resources/containers.rbs b/sig/openai/resources/containers.rbs new file mode 100644 index 00000000..dfbe77b8 --- /dev/null +++ b/sig/openai/resources/containers.rbs @@ -0,0 +1,33 @@ +module OpenAI + module Resources + class Containers + attr_reader files: OpenAI::Resources::Containers::Files + + def create: ( + name: String, + ?expires_after: OpenAI::ContainerCreateParams::ExpiresAfter, + ?file_ids: ::Array[String], + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ContainerCreateResponse + + def retrieve: ( + String container_id, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::ContainerRetrieveResponse + + def list: ( + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::ContainerListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::ContainerListResponse] + + def delete: ( + String container_id, + ?request_options: OpenAI::request_opts + ) -> nil + + def initialize: (client: 
OpenAI::Client) -> void + end + end +end diff --git a/sig/openai/resources/containers/files.rbs b/sig/openai/resources/containers/files.rbs new file mode 100644 index 00000000..561898fb --- /dev/null +++ b/sig/openai/resources/containers/files.rbs @@ -0,0 +1,38 @@ +module OpenAI + module Resources + class Containers + class Files + attr_reader content: OpenAI::Resources::Containers::Files::Content + + def create: ( + String container_id, + ?file: OpenAI::Internal::file_input, + ?file_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Containers::FileCreateResponse + + def retrieve: ( + String file_id, + container_id: String, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Models::Containers::FileRetrieveResponse + + def list: ( + String container_id, + ?after: String, + ?limit: Integer, + ?order: OpenAI::Models::Containers::FileListParams::order, + ?request_options: OpenAI::request_opts + ) -> OpenAI::Internal::CursorPage[OpenAI::Models::Containers::FileListResponse] + + def delete: ( + String file_id, + container_id: String, + ?request_options: OpenAI::request_opts + ) -> nil + + def initialize: (client: OpenAI::Client) -> void + end + end + end +end diff --git a/sig/openai/resources/containers/files/content.rbs b/sig/openai/resources/containers/files/content.rbs new file mode 100644 index 00000000..898c9988 --- /dev/null +++ b/sig/openai/resources/containers/files/content.rbs @@ -0,0 +1,17 @@ +module OpenAI + module Resources + class Containers + class Files + class Content + def retrieve: ( + String file_id, + container_id: String, + ?request_options: OpenAI::request_opts + ) -> nil + + def initialize: (client: OpenAI::Client) -> void + end + end + end + end +end diff --git a/sig/openai/resources/evals.rbs b/sig/openai/resources/evals.rbs index b4ed4454..dd3d6cc5 100644 --- a/sig/openai/resources/evals.rbs +++ b/sig/openai/resources/evals.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( data_source_config: 
OpenAI::Models::EvalCreateParams::data_source_config, - testing_criteria: ::Array[OpenAI::EvalCreateParams::testing_criterion], + testing_criteria: ::Array[OpenAI::Models::EvalCreateParams::testing_criterion], ?metadata: OpenAI::Models::metadata?, ?name: String, ?request_options: OpenAI::request_opts diff --git a/sig/openai/resources/files.rbs b/sig/openai/resources/files.rbs index 2af65729..49435b99 100644 --- a/sig/openai/resources/files.rbs +++ b/sig/openai/resources/files.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Files def create: ( - file: Pathname | StringIO | IO | OpenAI::FilePart, + file: OpenAI::Internal::file_input, purpose: OpenAI::Models::file_purpose, ?request_options: OpenAI::request_opts ) -> OpenAI::FileObject diff --git a/sig/openai/resources/images.rbs b/sig/openai/resources/images.rbs index 387d37d0..f441f385 100644 --- a/sig/openai/resources/images.rbs +++ b/sig/openai/resources/images.rbs @@ -2,7 +2,7 @@ module OpenAI module Resources class Images def create_variation: ( - image: Pathname | StringIO | IO | OpenAI::FilePart, + image: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageCreateVariationParams::model?, ?n: Integer?, ?response_format: OpenAI::Models::ImageCreateVariationParams::response_format?, @@ -15,7 +15,7 @@ module OpenAI image: OpenAI::Models::ImageEditParams::image, prompt: String, ?background: OpenAI::Models::ImageEditParams::background?, - ?mask: Pathname | StringIO | IO | OpenAI::FilePart, + ?mask: OpenAI::Internal::file_input, ?model: OpenAI::Models::ImageEditParams::model?, ?n: Integer?, ?quality: OpenAI::Models::ImageEditParams::quality?, diff --git a/sig/openai/resources/responses.rbs b/sig/openai/resources/responses.rbs index 6cbfd4aa..97ccd557 100644 --- a/sig/openai/resources/responses.rbs +++ b/sig/openai/resources/responses.rbs @@ -6,6 +6,7 @@ module OpenAI def create: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + ?background: bool?, 
?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, @@ -28,6 +29,7 @@ module OpenAI def stream_raw: ( input: OpenAI::Models::Responses::ResponseCreateParams::input, model: OpenAI::Models::responses_model, + ?background: bool?, ?include: ::Array[OpenAI::Models::Responses::response_includable]?, ?instructions: String?, ?max_output_tokens: Integer?, @@ -58,6 +60,11 @@ module OpenAI ?request_options: OpenAI::request_opts ) -> nil + def cancel: ( + String response_id, + ?request_options: OpenAI::request_opts + ) -> nil + def initialize: (client: OpenAI::Client) -> void end end diff --git a/sig/openai/resources/uploads/parts.rbs b/sig/openai/resources/uploads/parts.rbs index 20aac99a..41bfee05 100644 --- a/sig/openai/resources/uploads/parts.rbs +++ b/sig/openai/resources/uploads/parts.rbs @@ -4,7 +4,7 @@ module OpenAI class Parts def create: ( String upload_id, - data: Pathname | StringIO | IO | OpenAI::FilePart, + data: OpenAI::Internal::file_input, ?request_options: OpenAI::request_opts ) -> OpenAI::Uploads::UploadPart diff --git a/sig/openai/resources/vector_stores.rbs b/sig/openai/resources/vector_stores.rbs index 7bf0650d..d717bd54 100644 --- a/sig/openai/resources/vector_stores.rbs +++ b/sig/openai/resources/vector_stores.rbs @@ -7,7 +7,7 @@ module OpenAI def create: ( ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, - ?expires_after: OpenAI::VectorStoreExpirationAfter, + ?expires_after: OpenAI::VectorStoreCreateParams::ExpiresAfter, ?file_ids: ::Array[String], ?metadata: OpenAI::Models::metadata?, ?name: String, @@ -21,7 +21,7 @@ module OpenAI def update: ( String vector_store_id, - ?expires_after: OpenAI::VectorStoreExpirationAfter?, + ?expires_after: OpenAI::VectorStoreUpdateParams::ExpiresAfter?, ?metadata: OpenAI::Models::metadata?, ?name: String?, ?request_options: OpenAI::request_opts diff --git a/sig/openai/resources/vector_stores/file_batches.rbs 
b/sig/openai/resources/vector_stores/file_batches.rbs index 448f8ebb..1228381c 100644 --- a/sig/openai/resources/vector_stores/file_batches.rbs +++ b/sig/openai/resources/vector_stores/file_batches.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( String vector_store_id, file_ids: ::Array[String], - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileBatchCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileBatchCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> OpenAI::VectorStores::VectorStoreFileBatch diff --git a/sig/openai/resources/vector_stores/files.rbs b/sig/openai/resources/vector_stores/files.rbs index 2a650189..dfbccc37 100644 --- a/sig/openai/resources/vector_stores/files.rbs +++ b/sig/openai/resources/vector_stores/files.rbs @@ -5,7 +5,7 @@ module OpenAI def create: ( String vector_store_id, file_id: String, - ?attributes: ::Hash[Symbol, OpenAI::VectorStores::FileCreateParams::attribute]?, + ?attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileCreateParams::attribute]?, ?chunking_strategy: OpenAI::Models::file_chunking_strategy_param, ?request_options: OpenAI::request_opts ) -> OpenAI::VectorStores::VectorStoreFile @@ -19,7 +19,7 @@ module OpenAI def update: ( String file_id, vector_store_id: String, - attributes: ::Hash[Symbol, OpenAI::VectorStores::FileUpdateParams::attribute]?, + attributes: ::Hash[Symbol, OpenAI::Models::VectorStores::FileUpdateParams::attribute]?, ?request_options: OpenAI::request_opts ) -> OpenAI::VectorStores::VectorStoreFile diff --git a/sorbet/config b/sorbet/config index 538c1528..6fe84ed8 100644 --- a/sorbet/config +++ b/sorbet/config @@ -1,2 +1,2 @@ ---dir=rbi +--dir=rbi/ --ignore=test/ diff --git a/test/openai/internal/sorbet_runtime_support_test.rb b/test/openai/internal/sorbet_runtime_support_test.rb index 1c48a5ff..d1179f7e 100644 --- a/test/openai/internal/sorbet_runtime_support_test.rb 
+++ b/test/openai/internal/sorbet_runtime_support_test.rb @@ -25,6 +25,7 @@ def test_nil_aliases err = OpenAI::Internal::Util::SorbetRuntimeSupport::MissingSorbetRuntimeError assert_raises(err) { OpenAI::Internal::AnyHash } + assert_raises(err) { OpenAI::Internal::FileInput } assert_raises(err) { OpenAI::Internal::Type::Converter::Input } assert_raises(err) { OpenAI::Internal::Type::Converter::CoerceState } assert_raises(err) { OpenAI::Internal::Type::Converter::DumpState } diff --git a/test/openai/resource_namespaces.rb b/test/openai/resource_namespaces.rb index dc01c156..5134069c 100644 --- a/test/openai/resource_namespaces.rb +++ b/test/openai/resource_namespaces.rb @@ -27,11 +27,19 @@ module Checkpoints module Completions end + module Containers + module Files + end + end + module Evals module Runs end end + module Files + end + module FineTuning module Alpha end diff --git a/test/openai/resources/beta/threads/runs_test.rb b/test/openai/resources/beta/threads/runs_test.rb index c43e45d1..bed1e829 100644 --- a/test/openai/resources/beta/threads/runs_test.rb +++ b/test/openai/resources/beta/threads/runs_test.rb @@ -35,7 +35,7 @@ def test_create_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -75,7 +75,7 @@ def test_retrieve_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: 
Float | nil @@ -115,7 +115,7 @@ def test_update_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -162,7 +162,7 @@ def test_list thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -202,7 +202,7 @@ def test_cancel_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil @@ -243,7 +243,7 @@ def test_submit_tool_outputs_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil diff --git a/test/openai/resources/beta/threads_test.rb b/test/openai/resources/beta/threads_test.rb index 681dad3c..903a5185 100644 --- a/test/openai/resources/beta/threads_test.rb +++ 
b/test/openai/resources/beta/threads_test.rb @@ -105,7 +105,7 @@ def test_create_and_run_required_params thread_id: String, tool_choice: OpenAI::Beta::AssistantToolChoiceOption | nil, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Beta::AssistantTool]), - truncation_strategy: OpenAI::Beta::TruncationObject | nil, + truncation_strategy: OpenAI::Beta::Threads::Run::TruncationStrategy | nil, usage: OpenAI::Beta::Threads::Run::Usage | nil, temperature: Float | nil, top_p: Float | nil diff --git a/test/openai/resources/containers/files/content_test.rb b/test/openai/resources/containers/files/content_test.rb new file mode 100644 index 00000000..994b2806 --- /dev/null +++ b/test/openai/resources/containers/files/content_test.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +require_relative "../../../test_helper" + +class OpenAI::Test::Resources::Containers::Files::ContentTest < OpenAI::Test::ResourceTest + def test_retrieve_required_params + response = @openai.containers.files.content.retrieve("file_id", container_id: "container_id") + + assert_pattern do + response => nil + end + end +end diff --git a/test/openai/resources/containers/files_test.rb b/test/openai/resources/containers/files_test.rb new file mode 100644 index 00000000..d2522cc7 --- /dev/null +++ b/test/openai/resources/containers/files_test.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require_relative "../../test_helper" + +class OpenAI::Test::Resources::Containers::FilesTest < OpenAI::Test::ResourceTest + def test_create + response = @openai.containers.files.create("container_id") + + assert_pattern do + response => OpenAI::Models::Containers::FileCreateResponse + end + + assert_pattern do + response => { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + end + end + + def test_retrieve_required_params + response = @openai.containers.files.retrieve("file_id", container_id: "container_id") + + 
assert_pattern do + response => OpenAI::Models::Containers::FileRetrieveResponse + end + + assert_pattern do + response => { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + end + end + + def test_list + response = @openai.containers.files.list("container_id") + + assert_pattern do + response => OpenAI::Internal::CursorPage + end + + row = response.to_enum.first + return if row.nil? + + assert_pattern do + row => OpenAI::Models::Containers::FileListResponse + end + + assert_pattern do + row => { + id: String, + bytes: Integer, + container_id: String, + created_at: Integer, + object: Symbol, + path: String, + source: String + } + end + end + + def test_delete_required_params + response = @openai.containers.files.delete("file_id", container_id: "container_id") + + assert_pattern do + response => nil + end + end +end diff --git a/test/openai/resources/containers_test.rb b/test/openai/resources/containers_test.rb new file mode 100644 index 00000000..a04742ee --- /dev/null +++ b/test/openai/resources/containers_test.rb @@ -0,0 +1,77 @@ +# frozen_string_literal: true + +require_relative "../test_helper" + +class OpenAI::Test::Resources::ContainersTest < OpenAI::Test::ResourceTest + def test_create_required_params + response = @openai.containers.create(name: "name") + + assert_pattern do + response => OpenAI::Models::ContainerCreateResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerCreateResponse::ExpiresAfter | nil + } + end + end + + def test_retrieve + response = @openai.containers.retrieve("container_id") + + assert_pattern do + response => OpenAI::Models::ContainerRetrieveResponse + end + + assert_pattern do + response => { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: 
OpenAI::Models::ContainerRetrieveResponse::ExpiresAfter | nil + } + end + end + + def test_list + response = @openai.containers.list + + assert_pattern do + response => OpenAI::Internal::CursorPage + end + + row = response.to_enum.first + return if row.nil? + + assert_pattern do + row => OpenAI::Models::ContainerListResponse + end + + assert_pattern do + row => { + id: String, + created_at: Integer, + name: String, + object: String, + status: String, + expires_after: OpenAI::Models::ContainerListResponse::ExpiresAfter | nil + } + end + end + + def test_delete + response = @openai.containers.delete("container_id") + + assert_pattern do + response => nil + end + end +end diff --git a/test/openai/resources/responses/input_items_test.rb b/test/openai/resources/responses/input_items_test.rb index 3c32f365..51476189 100644 --- a/test/openai/resources/responses/input_items_test.rb +++ b/test/openai/resources/responses/input_items_test.rb @@ -27,6 +27,14 @@ def test_list in OpenAI::Responses::ResponseFunctionWebSearch in OpenAI::Responses::ResponseFunctionToolCallItem in OpenAI::Responses::ResponseFunctionToolCallOutputItem + in OpenAI::Responses::ResponseItem::ImageGenerationCall + in OpenAI::Responses::ResponseCodeInterpreterToolCall + in OpenAI::Responses::ResponseItem::LocalShellCall + in OpenAI::Responses::ResponseItem::LocalShellCallOutput + in OpenAI::Responses::ResponseItem::McpListTools + in OpenAI::Responses::ResponseItem::McpApprovalRequest + in OpenAI::Responses::ResponseItem::McpApprovalResponse + in OpenAI::Responses::ResponseItem::McpCall end end @@ -77,6 +85,57 @@ def test_list output: String, status: OpenAI::Responses::ResponseFunctionToolCallOutputItem::Status | nil } + in { + type: :image_generation_call, + id: String, + result: String | nil, + status: OpenAI::Responses::ResponseItem::ImageGenerationCall::Status + } + in { + type: :code_interpreter_call, + id: String, + code: String, + results: ^(OpenAI::Internal::Type::ArrayOf[union: 
OpenAI::Responses::ResponseCodeInterpreterToolCall::Result]), + status: OpenAI::Responses::ResponseCodeInterpreterToolCall::Status, + container_id: String | nil + } + in { + type: :local_shell_call, + id: String, + action: OpenAI::Responses::ResponseItem::LocalShellCall::Action, + call_id: String, + status: OpenAI::Responses::ResponseItem::LocalShellCall::Status + } + in { + type: :local_shell_call_output, + id: String, + output: String, + status: OpenAI::Responses::ResponseItem::LocalShellCallOutput::Status | nil + } + in { + type: :mcp_list_tools, + id: String, + server_label: String, + tools: ^(OpenAI::Internal::Type::ArrayOf[OpenAI::Responses::ResponseItem::McpListTools::Tool]), + error: String | nil + } + in {type: :mcp_approval_request, id: String, arguments: String, name: String, server_label: String} + in { + type: :mcp_approval_response, + id: String, + approval_request_id: String, + approve: OpenAI::Internal::Type::Boolean, + reason: String | nil + } + in { + type: :mcp_call, + id: String, + arguments: String, + name: String, + server_label: String, + error: String | nil, + output: String | nil + } end end end diff --git a/test/openai/resources/responses_test.rb b/test/openai/resources/responses_test.rb index a01200f4..deb7c605 100644 --- a/test/openai/resources/responses_test.rb +++ b/test/openai/resources/responses_test.rb @@ -26,6 +26,7 @@ def test_create_required_params tool_choice: OpenAI::Responses::Response::ToolChoice, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, + background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, previous_response_id: String | nil, reasoning: OpenAI::Reasoning | nil, @@ -62,6 +63,7 @@ def test_retrieve tool_choice: OpenAI::Responses::Response::ToolChoice, tools: ^(OpenAI::Internal::Type::ArrayOf[union: OpenAI::Responses::Tool]), top_p: Float | nil, + background: OpenAI::Internal::Type::Boolean | nil, max_output_tokens: Integer | nil, 
previous_response_id: String | nil, reasoning: OpenAI::Reasoning | nil, @@ -82,4 +84,12 @@ def test_delete response => nil end end + + def test_cancel + response = @openai.responses.cancel("resp_677efb5139a88190b512bc3fef8e535d") + + assert_pattern do + response => nil + end + end end diff --git a/test/openai/resources/vector_stores_test.rb b/test/openai/resources/vector_stores_test.rb index 4e020135..bbce9895 100644 --- a/test/openai/resources/vector_stores_test.rb +++ b/test/openai/resources/vector_stores_test.rb @@ -21,7 +21,7 @@ def test_create object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -45,7 +45,7 @@ def test_retrieve object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -69,7 +69,7 @@ def test_update object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end @@ -100,7 +100,7 @@ def test_list object: Symbol, status: OpenAI::VectorStore::Status, usage_bytes: Integer, - expires_after: OpenAI::VectorStoreExpirationAfter | nil, + expires_after: OpenAI::VectorStore::ExpiresAfter | nil, expires_at: Integer | nil } end