From 7eb91852c03eaba177464060631c2645d3db63d0 Mon Sep 17 00:00:00 2001 From: meorphis Date: Wed, 16 Apr 2025 18:34:01 -0400 Subject: [PATCH 01/15] chore(internal): configure releases --- .github/workflows/create-releases.yml | 42 +++++++++++++++++++++++++++ .github/workflows/publish-gem.yml | 6 +--- 2 files changed, 43 insertions(+), 5 deletions(-) create mode 100644 .github/workflows/create-releases.yml diff --git a/.github/workflows/create-releases.yml b/.github/workflows/create-releases.yml new file mode 100644 index 00000000..eda1ae13 --- /dev/null +++ b/.github/workflows/create-releases.yml @@ -0,0 +1,42 @@ +name: Create releases +on: + schedule: + - cron: '0 5 * * *' # every day at 5am UTC + push: + branches: + - main + +jobs: + release: + name: release + if: github.ref == 'refs/heads/main' && github.repository == 'openai/openai-ruby' + runs-on: ubuntu-latest + environment: publish + permissions: + contents: read + id-token: write + + steps: + - uses: actions/checkout@v4 + + - uses: stainless-api/trigger-release-please@v1 + id: release + with: + repo: ${{ github.event.repository.full_name }} + stainless-api-key: ${{ secrets.STAINLESS_API_KEY }} + + - name: Set up Ruby + uses: ruby/setup-ruby@v1 + with: + bundler-cache: false + ruby-version: '3.1' + - run: |- + bundle install + + - name: Publish to RubyGems.org + run: | + bash ./bin/publish-gem + env: + # `RUBYGEMS_HOST` is only required for private gem repositories, not https://rubygems.org + RUBYGEMS_HOST: ${{ secrets.OPENAI_RUBYGEMS_HOST || secrets.RUBYGEMS_HOST }} + GEM_HOST_API_KEY: ${{ secrets.OPENAI_GEM_HOST_API_KEY || secrets.GEM_HOST_API_KEY }} diff --git a/.github/workflows/publish-gem.yml b/.github/workflows/publish-gem.yml index d6ba1c4a..7502f1c2 100644 --- a/.github/workflows/publish-gem.yml +++ b/.github/workflows/publish-gem.yml @@ -1,13 +1,9 @@ -# This workflow is triggered when a GitHub release is created. -# It can also be run manually to re-publish to rubygems.org in case it failed for some reason. +# Workflow for re-publishing to rubygems.org in case it failed for some reason. 
# You can run this workflow by navigating to https://www.github.com/openai/openai-ruby/actions/workflows/publish-gem.yml
 name: Publish Gem
 on:
   workflow_dispatch:
-  release:
-    types: [published]
-
 jobs:
   publish:
     name: publish

From b0a523ea9e176cb29f54d921539d05c33ddda7f5 Mon Sep 17 00:00:00 2001
From: meorphis
Date: Wed, 16 Apr 2025 18:49:19 -0400
Subject: [PATCH 02/15] fix

---
 .github/workflows/create-releases.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/create-releases.yml b/.github/workflows/create-releases.yml
index eda1ae13..2ba342e4 100644
--- a/.github/workflows/create-releases.yml
+++ b/.github/workflows/create-releases.yml
@@ -1,7 +1,5 @@
 name: Create releases
 on:
-  schedule:
-    - cron: '0 5 * * *' # every day at 5am UTC
   push:
     branches:
       - main
@@ -26,6 +24,7 @@ jobs:
         stainless-api-key: ${{ secrets.STAINLESS_API_KEY }}
       - name: Set up Ruby
+        if: ${{ steps.release.outputs.releases_created }}
         uses: ruby/setup-ruby@v1
         with:
           bundler-cache: false
@@ -34,6 +33,7 @@ jobs:
         bundle install
       - name: Publish to RubyGems.org
+        if: ${{ steps.release.outputs.releases_created }}
         run: |
           bash ./bin/publish-gem
         env:

From 6c89d1376ef3d4c5cdc1910cf50b69f3d8d91c6a Mon Sep 17 00:00:00 2001
From: meorphis <108296353+meorphis@users.noreply.github.com>
Date: Wed, 16 Apr 2025 19:25:31 -0400
Subject: [PATCH 03/15] Update create-releases.yml

---
 .github/workflows/create-releases.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/create-releases.yml b/.github/workflows/create-releases.yml
index 2ba342e4..c1ea30fa 100644
--- a/.github/workflows/create-releases.yml
+++ b/.github/workflows/create-releases.yml
@@ -3,6 +3,7 @@ on:
   push:
     branches:
       - main
+  workflow_dispatch:
 jobs:
   release:

From 896142abf1bb03f1eb48e0754cbff04edd081a0e Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 17 Apr 2025 11:47:06 +0000
Subject: [PATCH 04/15] fix(client): send correct HTTP path

---
 lib/openai/internal/transport/pooled_net_requester.rb | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb
index a9ef117f..df4e1205 100644
--- a/lib/openai/internal/transport/pooled_net_requester.rb
+++ b/lib/openai/internal/transport/pooled_net_requester.rb
@@ -57,11 +57,15 @@ def calibrate_socket_timeout(conn, deadline)
     # @return [Array(Net::HTTPGenericRequest, Proc)]
     def build_request(request, &blk)
       method, url, headers, body = request.fetch_values(:method, :url, :headers, :body)
+
+      # ensure we construct a URI class of the right scheme
+      url = URI(url.to_s)
+
       req = Net::HTTPGenericRequest.new(
         method.to_s.upcase,
         !body.nil?,
         method != :head,
-        url.to_s
+        url
       )
 
       headers.each { req[_1] = _2 }

From d060adf81aadb6b138428bdbde79633fd0dff230 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com>
Date: Thu, 17 Apr 2025 17:00:54 +0000
Subject: [PATCH 05/15] chore(internal): contribute.md and contributor QoL improvements

---
 .gitignore            | 6 +-
 .ruby-version         | 1 +
 CONTRIBUTING.md       | 132 ++++++++++++++++++++++++++++++++++++++++++
 Rakefile              | 65 ++++++++++++++++-----
 openai.gemspec        | 17 ++++--
 scripts/bootstrap     | 4 +-
 scripts/format        | 1 +
 scripts/lint          | 2 +
 scripts/test          | 2 +-
 sorbet/rbi/.gitignore | 2 +
 10 files changed, 209 insertions(+), 23 deletions(-)
 create mode 100644 .ruby-version
 create mode 100644 CONTRIBUTING.md
 create mode 100644
sorbet/rbi/.gitignore

diff --git a/.gitignore b/.gitignore
index 8b1228a8..3d26ceed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,10 @@
 *.gem
 .idea/
+.ignore
 .prism.log
 .ruby-lsp/
 .yardoc/
-Brewfile.lock.json
 bin/tapioca
+Brewfile.lock.json
 doc/
-sorbet/*
-!/sorbet/config
+sorbet/tapioca/*

diff --git a/.ruby-version b/.ruby-version
new file mode 100644
index 00000000..fd2a0186
--- /dev/null
+++ b/.ruby-version
@@ -0,0 +1 @@
+3.1.0

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..d800af1c
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,132 @@
+## Setting up the environment
+
+This repository contains a `.ruby-version` file, which should work with either [rbenv](https://github.com/rbenv/rbenv) or [asdf](https://github.com/asdf-vm/asdf) with the [ruby plugin](https://github.com/asdf-vm/asdf-ruby).
+
+Please follow the instructions for your preferred version manager to install the Ruby version specified in the `.ruby-version` file.
+
+To set up the repository, run:
+
+```bash
+$ ./scripts/bootstrap
+```
+
+This will install all the required dependencies.
+
+## Modifying/Adding code
+
+Most of the SDK is generated code. Modifications to code will be persisted between generations, but may result in merge conflicts between manual patches and changes from the generator. The generator will never modify the contents of the `examples/` directory.
+
+## Adding and running examples
+
+All files in the `examples/` directory are not modified by the generator and can be freely edited or added to.
+
+```ruby
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require_relative "../lib/openai"
+
+# ...
+```
+
+```bash
+$ chmod +x './examples/.rb'
+
+# run the example against your api
+$ ruby './examples/.rb'
+```
+
+## Using the repository from source
+
+If you’d like to use the repository from source, you can either install from git or reference a cloned repository:
+
+To install via git in your `Gemfile`:
+
+```ruby
+gem "openai", git: "https://www.github.com/openai/openai-ruby"
+```
+
+Alternatively, reference a local copy of the repo:
+
+```bash
+$ git clone -- 'https://www.github.com/openai/openai-ruby' ''
+```
+
+```ruby
+gem "openai", path: ""
+```
+
+## Running commands
+
+Running `rake` by itself will show all runnable commands.
+
+```bash
+$ bundle exec rake
+```
+
+## Running tests
+
+Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests.
+
+```bash
+$ npx prism mock path/to/your/openapi.yml
+```
+
+```bash
+$ bundle exec rake test
+```
+
+## Linting and formatting
+
+This repository uses [rubocop](https://github.com/rubocop/rubocop) for linting and formatting of `*.rb` and `*.rbi` files. [syntax_tree](https://github.com/ruby-syntax-tree/syntax_tree) is used for formatting `*.rbs` files.
+
+There are two separate type checkers supported by this library: [sorbet](https://github.com/sorbet/sorbet) and [steep](https://github.com/soutaro/steep) are used for verifying `*.rbi` and `*.rbs` files respectively.
+
+To lint and typecheck:
+
+```bash
+$ bundle exec rake lint
+```
+
+To format and fix all lint issues automatically:
+
+```bash
+$ bundle exec rake format
+```
+
+## Editor Support
+
+### Solargraph
+
+This library includes [Solargraph](https://solargraph.org) support for both auto-completion and go to definition.
+
+```ruby
+gem "solargraph", group: :development
+```
+
+Note: if you had installed the gem locally using `git: "..."` or `path: "..."`, you must update your [`.solargraph.yml`](https://solargraph.org/guides/configuration) to include the path to where the gem is located:
+
+```yaml
+include:
+  - '/lib/**/*.rb'
+```
+
+### Sorbet
+
+[Sorbet](https://sorbet.org) should mostly work out of the box when editing this library directly. However, there are some caveats due to the colocation of `*.rb` and `*.rbi` files in the same project. These issues should not otherwise manifest when this library is used as a dependency.
+
+1. For go to definition usages, sorbet might get confused and may not always navigate to the correct location.
+
+2. For each generic type in `*.rbi` files, a spurious "Duplicate type member" error is present.
+
+### Ruby LSP
+
+The Ruby LSP has [best effort support](https://shopify.github.io/ruby-lsp/#guessed-types) for inferring type information from Ruby code, and as such it may not always be able to provide accurate type information.
+
+## Documentation Preview
+
+To preview the documentation, run:
+
+```bash
+$ bundle exec rake docs:preview [PORT=8808]
+```

diff --git a/Rakefile b/Rakefile
index 41d7c926..7a8155db 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,5 +1,6 @@
 # frozen_string_literal: true
+require "pathname"
 require "securerandom"
 require "shellwords"
@@ -7,10 +8,23 @@
 require "minitest/test_task"
 require "rake/clean"
 require "rubocop/rake_task"
-CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/])
+tapioca = "sorbet/tapioca"
+ignore_file = ".ignore"
-multitask(default: [:test])
+CLEAN.push(*%w[.idea/ .ruby-lsp/ .yardoc/ doc/], *FileList["*.gem"], ignore_file)
+CLOBBER.push(*%w[sorbet/rbi/annotations/ sorbet/rbi/gems/], tapioca)
+
+multitask(:default) do
+  sh(*%w[rake --tasks])
+end
+
+desc("Preview docs; use `PORT=` to change the port")
+multitask(:"docs:preview") do
+  sh(*%w[yard server --bind [::] --reload --quiet --port], ENV.fetch("PORT", "8808"))
+end
+
+desc("Run test suites; use `TEST=path/to/test.rb` to run a specific test file")
 multitask(:test) do
   rb = FileList[ENV.fetch("TEST", "./test/**/*_test.rb")]
@@ -23,17 +37,20 @@ end
 rubo_find = %w[find ./lib ./test ./rbi -type f -and ( -name *.rb -or -name *.rbi ) -print0]
 xargs = %w[xargs --no-run-if-empty --null --max-procs=0 --max-args=300 --]
-multitask(:rubocop) do
+desc("Lint `*.rb(i)`")
+multitask(:"lint:rubocop") do
   lint = xargs + %w[rubocop --fail-level E] + (ENV.key?("CI") ? %w[--format github] : [])
   sh("#{rubo_find.shelljoin} | #{lint.shelljoin}")
 end
-multitask(:ruboformat) do
+desc("Format `*.rb(i)`")
+multitask(:"format:rubocop") do
   fmt = xargs + %w[rubocop --fail-level F --autocorrect --format simple --]
   sh("#{rubo_find.shelljoin} | #{fmt.shelljoin}")
 end
-multitask(:syntax_tree) do
+desc("Format `*.rbs`")
+multitask(:"format:syntax_tree") do
   find = %w[find ./sig -type f -name *.rbs -print0]
   inplace = /darwin|bsd/ =~ RUBY_PLATFORM ?
%w[-i''] : %w[-i] uuid = SecureRandom.uuid @@ -74,27 +91,49 @@ multitask(:syntax_tree) do fail unless success end -multitask(format: [:ruboformat, :syntax_tree]) +desc("Format everything") +multitask(format: [:"format:rubocop", :"format:syntax_tree"]) -multitask(:steep) do +desc("Typecheck `*.rbs`") +multitask(:"typecheck:steep") do sh(*%w[steep check]) end -multitask(:sorbet) do +desc("Typecheck `*.rbi`") +multitask(:"typecheck:sorbet") do sh(*%w[srb typecheck]) end -file("sorbet/tapioca") do +file(tapioca) do sh(*%w[tapioca init]) end -multitask(typecheck: [:steep, :sorbet]) -multitask(lint: [:rubocop, :typecheck]) +desc("Typecheck everything") +multitask(typecheck: [:"typecheck:steep", :"typecheck:sorbet"]) + +desc("Lint everything") +multitask(lint: [:"lint:rubocop", :typecheck]) + +desc("Build yard docs") +multitask(:"build:docs") do + sh(*%w[yard]) +end + +desc("Build ruby gem") +multitask(:"build:gem") do + # optimizing for grepping through the gem bundle: many tools honour `.ignore` files, including VSCode + # + # both `rbi` and `sig` directories are navigable by their respective tool chains and therefore can be ignored by tools such as `rg` + Pathname(ignore_file).write(<<~GLOB) + rbi/* + sig/* + GLOB -multitask(:build) do sh(*%w[gem build -- openai.gemspec]) + rm_rf(ignore_file) end -multitask(release: [:build]) do +desc("Release ruby gem") +multitask(release: [:"build:gem"]) do sh(*%w[gem push], *FileList["openai-*.gem"]) end diff --git a/openai.gemspec b/openai.gemspec index 1171bdbe..64c1b6d9 100644 --- a/openai.gemspec +++ b/openai.gemspec @@ -8,12 +8,21 @@ Gem::Specification.new do |s| s.summary = "Ruby library to access the OpenAI API" s.authors = ["OpenAI"] s.email = "support@openai.com" - s.files = Dir["lib/**/*.rb", "rbi/**/*.rbi", "sig/**/*.rbs", "manifest.yaml", "CHANGELOG.md", "SECURITY.md"] - s.extra_rdoc_files = ["README.md"] - s.required_ruby_version = ">= 3.0.0" - s.add_dependency "connection_pool" s.homepage = "https://gemdocs.org/gems/openai" s.metadata["homepage_uri"] = s.homepage s.metadata["source_code_uri"] = "https://github.com/openai/openai-ruby" s.metadata["rubygems_mfa_required"] = false.to_s + s.required_ruby_version = ">= 3.0.0" + + s.files = Dir[ + "lib/**/*.rb", + "rbi/**/*.rbi", + "sig/**/*.rbs", + "manifest.yaml", + "SECURITY.md", + "CHANGELOG.md", + ".ignore" + ] + s.extra_rdoc_files = ["README.md"] + s.add_dependency "connection_pool" end diff --git a/scripts/bootstrap b/scripts/bootstrap index 88566757..cc31aa85 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -2,7 +2,7 @@ set -e -cd "$(dirname "$0")/.." +cd -- "$(dirname -- "$0")/.." if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ]; then brew bundle check >/dev/null 2>&1 || { @@ -13,4 +13,4 @@ fi echo "==> Installing Ruby dependencies…" -bundle install +exec -- bundle install "$@" diff --git a/scripts/format b/scripts/format index 67b400de..177d1e63 100755 --- a/scripts/format +++ b/scripts/format @@ -5,4 +5,5 @@ set -e cd -- "$(dirname -- "$0")/.." echo "==> Running formatters" + exec -- bundle exec rake format "$@" diff --git a/scripts/lint b/scripts/lint index 39581dc1..08b0dbeb 100755 --- a/scripts/lint +++ b/scripts/lint @@ -4,4 +4,6 @@ set -e cd -- "$(dirname -- "$0")/.." +echo "==> Running linters" + exec -- bundle exec rake lint "$@" diff --git a/scripts/test b/scripts/test index 2e1fe093..8e5d35cd 100755 --- a/scripts/test +++ b/scripts/test @@ -2,7 +2,7 @@ set -e -cd "$(dirname "$0")/.." +cd -- "$(dirname -- "$0")/.." 
RED='\033[0;31m' GREEN='\033[0;32m' diff --git a/sorbet/rbi/.gitignore b/sorbet/rbi/.gitignore new file mode 100644 index 00000000..d6b7ef32 --- /dev/null +++ b/sorbet/rbi/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore From 548bfaf81a4947860ec35ff7efafb144da4863bb Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 17:09:17 +0000 Subject: [PATCH 06/15] fix: always send idempotency header when specified as a request option --- lib/openai/internal/transport/base_client.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/openai/internal/transport/base_client.rb b/lib/openai/internal/transport/base_client.rb index 24d1f21d..30b12835 100644 --- a/lib/openai/internal/transport/base_client.rb +++ b/lib/openai/internal/transport/base_client.rb @@ -253,7 +253,7 @@ def initialize( if @idempotency_header && !headers.key?(@idempotency_header) && - !Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) + (!Net::HTTP::IDEMPOTENT_METHODS_.include?(method.to_s.upcase) || opts.key?(:idempotency_key)) headers[@idempotency_header] = opts.fetch(:idempotency_key) { generate_idempotency_key } end From 84308a6683e6ed9d520b05e0ac828de662fe0198 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Thu, 17 Apr 2025 20:47:21 +0000 Subject: [PATCH 07/15] chore: refine `#inspect` and `#to_s` for model classes --- lib/openai/internal/type/base_model.rb | 46 ++++++++++++++------- rbi/lib/openai/internal/type/base_model.rbi | 9 ++++ sig/openai/internal/type/base_model.rbs | 6 +++ 3 files changed, 46 insertions(+), 15 deletions(-) diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index efc555b7..239dbfd3 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -338,6 +338,27 @@ def deconstruct_keys(keys) .to_h end + class << self + # @param model [OpenAI::Internal::Type::BaseModel] + # + # @return [Hash{Symbol=>Object}] + def walk(model) + walk = ->(x) do + case x + in OpenAI::Internal::Type::BaseModel + walk.call(x.to_h) + in Hash + x.transform_values(&walk) + in Array + x.map(&walk) + else + x + end + end + walk.call(model) + end + end + # @param a [Object] # # @return [String] @@ -373,13 +394,11 @@ def inspect(depth: 0) depth = depth.succ deferred = fields.transform_values do |field| type, required, nilable = field.fetch_values(:type, :required, :nilable) - -> do - [ - OpenAI::Internal::Type::Converter.inspect(type, depth: depth), - !required || nilable ? "nil" : nil - ].compact.join(" | ") - end - .tap { _1.define_singleton_method(:inspect) { call } } + inspected = [ + OpenAI::Internal::Type::Converter.inspect(type, depth: depth), + !required || nilable ? "nil" : nil + ].compact.join(" | ") + -> { inspected }.tap { _1.define_singleton_method(:inspect) { call } } end "#{name}[#{deferred.inspect}]" @@ -389,15 +408,12 @@ def inspect(depth: 0) # @api private # # @return [String] - def inspect - rows = @data.map do - "#{_1}=#{self.class.known_fields.key?(_1) ? 
public_send(_1).inspect : ''}" - rescue OpenAI::Errors::ConversionError - "#{_1}=#{_2.inspect}" - end + def to_s = self.class.walk(@data).to_s - "#<#{self.class}:0x#{object_id.to_s(16)} #{rows.join(' ')}>" - end + # @api private + # + # @return [String] + def inspect = "#<#{self.class}:0x#{object_id.to_s(16)} #{self}>" end end end diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 23fbb5a9..32f6a62c 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -175,6 +175,11 @@ module OpenAI sig { params(keys: T.nilable(T::Array[Symbol])).returns(OpenAI::Internal::AnyHash) } def deconstruct_keys(keys); end + class << self + sig { params(model: OpenAI::Internal::Type::BaseModel).returns(OpenAI::Internal::AnyHash) } + def walk(model); end + end + sig { params(a: T.anything).returns(String) } def to_json(*a); end @@ -191,6 +196,10 @@ module OpenAI def inspect(depth: 0); end end + # @api private + sig { returns(String) } + def to_s; end + # @api private sig { returns(String) } def inspect; end diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index f6bfeb6a..e3a7d42c 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -69,6 +69,10 @@ module OpenAI def deconstruct_keys: (::Array[Symbol]? keys) -> ::Hash[Symbol, top] + def self.walk: ( + OpenAI::Internal::Type::BaseModel model + ) -> ::Hash[Symbol, top] + def to_json: (*top a) -> String def to_yaml: (*top a) -> String @@ -77,6 +81,8 @@ module OpenAI def self.inspect: (?depth: Integer) -> String + def to_s: -> String + def inspect: -> String end end From 7c0321329658a6d2823f9022a77be5965186b94c Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 09:42:03 +0000 Subject: [PATCH 08/15] chore: make sorbet enums easier to read --- rbi/lib/openai/models/all_models.rbi | 5 +- .../models/audio/speech_create_params.rbi | 10 ++- rbi/lib/openai/models/audio/speech_model.rbi | 2 +- .../audio/transcription_create_params.rbi | 11 +--- .../models/audio/transcription_include.rbi | 3 +- .../audio/translation_create_params.rbi | 5 +- rbi/lib/openai/models/audio_model.rbi | 2 +- .../openai/models/audio_response_format.rbi | 2 +- rbi/lib/openai/models/batch.rbi | 2 +- rbi/lib/openai/models/batch_create_params.rbi | 6 +- .../models/beta/assistant_create_params.rbi | 2 +- .../models/beta/assistant_list_params.rbi | 3 +- .../models/beta/assistant_tool_choice.rbi | 3 +- .../beta/assistant_tool_choice_option.rbi | 5 +- .../models/beta/assistant_update_params.rbi | 5 +- .../openai/models/beta/file_search_tool.rbi | 9 +-- .../beta/thread_create_and_run_params.rbi | 14 +---- .../models/beta/thread_create_params.rbi | 3 +- .../openai/models/beta/threads/image_file.rbi | 3 +- .../models/beta/threads/image_file_delta.rbi | 3 +- .../openai/models/beta/threads/image_url.rbi | 3 +- .../models/beta/threads/image_url_delta.rbi | 3 +- .../openai/models/beta/threads/message.rbi | 9 +-- .../beta/threads/message_create_params.rbi | 3 +- .../models/beta/threads/message_delta.rbi | 3 +- .../beta/threads/message_list_params.rbi | 3 +- rbi/lib/openai/models/beta/threads/run.rbi | 9 +-- .../models/beta/threads/run_create_params.rbi | 20 +----- .../models/beta/threads/run_list_params.rbi | 3 +- .../openai/models/beta/threads/run_status.rbi | 2 +- .../threads/runs/file_search_tool_call.rbi | 18 +----- 
.../models/beta/threads/runs/run_step.rbi | 9 +-- .../beta/threads/runs/run_step_include.rbi | 3 +- .../beta/threads/runs/step_list_params.rbi | 3 +- .../openai/models/chat/chat_completion.rbi | 6 +- .../chat/chat_completion_audio_param.rbi | 8 +-- .../models/chat/chat_completion_chunk.rbi | 18 ++---- .../chat_completion_content_part_image.rbi | 9 +-- ...at_completion_content_part_input_audio.rbi | 9 +-- .../models/chat/chat_completion_modality.rbi | 3 +- .../models/chat/chat_completion_role.rbi | 2 +- .../chat_completion_tool_choice_option.rbi | 5 +- .../models/chat/completion_create_params.rbi | 28 ++------- .../models/chat/completion_list_params.rbi | 3 +- .../chat/completions/message_list_params.rbi | 3 +- rbi/lib/openai/models/chat_model.rbi | 2 +- rbi/lib/openai/models/comparison_filter.rbi | 2 +- rbi/lib/openai/models/completion_choice.rbi | 3 +- .../models/completion_create_params.rbi | 5 +- rbi/lib/openai/models/compound_filter.rbi | 2 +- .../openai/models/embedding_create_params.rbi | 5 +- rbi/lib/openai/models/embedding_model.rbi | 2 +- rbi/lib/openai/models/eval_create_params.rbi | 54 ++-------------- .../openai/models/eval_label_model_grader.rbi | 21 ++----- rbi/lib/openai/models/eval_list_params.rbi | 4 +- .../models/eval_string_check_grader.rbi | 3 +- .../models/eval_text_similarity_grader.rbi | 3 +- ...reate_eval_completions_run_data_source.rbi | 57 +++-------------- .../openai/models/evals/run_list_params.rbi | 6 +- .../evals/runs/output_item_list_params.rbi | 6 +- rbi/lib/openai/models/file_list_params.rbi | 2 +- rbi/lib/openai/models/file_object.rbi | 4 +- rbi/lib/openai/models/file_purpose.rbi | 2 +- .../permission_retrieve_params.rbi | 9 +-- .../models/fine_tuning/fine_tuning_job.rbi | 6 +- .../fine_tuning/fine_tuning_job_event.rbi | 6 +- .../models/fine_tuning/job_create_params.rbi | 8 +-- .../models/image_create_variation_params.rbi | 8 +-- rbi/lib/openai/models/image_edit_params.rbi | 7 +-- .../openai/models/image_generate_params.rbi | 14 ++--- rbi/lib/openai/models/image_model.rbi | 2 +- rbi/lib/openai/models/moderation.rbi | 63 ++++--------------- .../models/moderation_create_params.rbi | 2 +- rbi/lib/openai/models/moderation_model.rbi | 2 +- rbi/lib/openai/models/reasoning.rbi | 5 +- rbi/lib/openai/models/reasoning_effort.rbi | 2 +- .../openai/models/responses/computer_tool.rbi | 3 +- .../models/responses/easy_input_message.rbi | 6 +- .../models/responses/file_search_tool.rbi | 3 +- .../responses/input_item_list_params.rbi | 3 +- rbi/lib/openai/models/responses/response.rbi | 9 +-- .../response_code_interpreter_tool_call.rbi | 3 +- .../responses/response_computer_tool_call.rbi | 15 +---- ...esponse_computer_tool_call_output_item.rbi | 3 +- .../responses/response_create_params.rbi | 8 +-- .../models/responses/response_error.rbi | 3 +- .../response_file_search_tool_call.rbi | 3 +- .../responses/response_function_tool_call.rbi | 3 +- ...esponse_function_tool_call_output_item.rbi | 3 +- .../response_function_web_search.rbi | 3 +- .../models/responses/response_includable.rbi | 3 +- .../models/responses/response_input_audio.rbi | 3 +- .../models/responses/response_input_image.rbi | 3 +- .../models/responses/response_input_item.rbi | 27 ++------ .../responses/response_input_message_item.rbi | 9 +-- .../responses/response_output_message.rbi | 3 +- .../responses/response_reasoning_item.rbi | 3 +- .../models/responses/response_status.rbi | 3 +- .../models/responses/tool_choice_options.rbi | 3 +- .../models/responses/tool_choice_types.rbi | 3 +- 
.../models/responses/web_search_tool.rbi | 6 +- rbi/lib/openai/models/responses_model.rbi | 5 +- rbi/lib/openai/models/upload.rbi | 2 +- rbi/lib/openai/models/vector_store.rbi | 2 +- .../models/vector_store_list_params.rbi | 3 +- .../models/vector_store_search_params.rbi | 3 +- .../models/vector_store_search_response.rbi | 3 +- .../file_batch_list_files_params.rbi | 6 +- .../models/vector_stores/file_list_params.rbi | 6 +- .../vector_stores/vector_store_file.rbi | 6 +- .../vector_stores/vector_store_file_batch.rbi | 3 +- 111 files changed, 207 insertions(+), 566 deletions(-) diff --git a/rbi/lib/openai/models/all_models.rbi b/rbi/lib/openai/models/all_models.rbi index 271f8306..82f906f3 100644 --- a/rbi/lib/openai/models/all_models.rbi +++ b/rbi/lib/openai/models/all_models.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } O1_PRO = T.let(:"o1-pro", OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol) O1_PRO_2025_03_19 = @@ -27,7 +26,7 @@ module OpenAI sig do override .returns( - [String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::AllModels::ResponsesOnlyModel::OrSymbol] + [String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::AllModels::ResponsesOnlyModel::TaggedSymbol] ) end def self.variants; end diff --git a/rbi/lib/openai/models/audio/speech_create_params.rbi b/rbi/lib/openai/models/audio/speech_create_params.rbi index 8dd902b4..68db9bed 100644 --- a/rbi/lib/openai/models/audio/speech_create_params.rbi +++ b/rbi/lib/openai/models/audio/speech_create_params.rbi @@ -91,7 +91,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Audio::SpeechModel::TaggedSymbol]) } def self.variants; end end @@ -102,12 +102,11 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ALLOY = T.let(:alloy, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) ASH = T.let(:ash, OpenAI::Models::Audio::SpeechCreateParams::Voice::TaggedSymbol) @@ -128,8 +127,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MP3 = T.let(:mp3, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) OPUS = T.let(:opus, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/speech_model.rbi b/rbi/lib/openai/models/audio/speech_model.rbi index 2f4f0bf8..e049d032 100644 --- a/rbi/lib/openai/models/audio/speech_model.rbi +++ 
b/rbi/lib/openai/models/audio/speech_model.rbi @@ -7,7 +7,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::SpeechModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::SpeechModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TTS_1 = T.let(:"tts-1", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) TTS_1_HD = T.let(:"tts-1-hd", OpenAI::Models::Audio::SpeechModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/transcription_create_params.rbi b/rbi/lib/openai/models/audio/transcription_create_params.rbi index c8481144..c6020c3f 100644 --- a/rbi/lib/openai/models/audio/transcription_create_params.rbi +++ b/rbi/lib/openai/models/audio/transcription_create_params.rbi @@ -137,7 +137,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::AudioModel::TaggedSymbol]) } def self.variants; end end @@ -146,14 +146,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } WORD = T.let(:word, OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity::TaggedSymbol) SEGMENT = diff --git a/rbi/lib/openai/models/audio/transcription_include.rbi b/rbi/lib/openai/models/audio/transcription_include.rbi index 732e95a4..bb5758c1 100644 --- a/rbi/lib/openai/models/audio/transcription_include.rbi +++ b/rbi/lib/openai/models/audio/transcription_include.rbi @@ -7,8 +7,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranscriptionInclude) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } LOGPROBS = T.let(:logprobs, OpenAI::Models::Audio::TranscriptionInclude::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio/translation_create_params.rbi b/rbi/lib/openai/models/audio/translation_create_params.rbi index 6d4b9e08..405309c1 100644 --- a/rbi/lib/openai/models/audio/translation_create_params.rbi +++ b/rbi/lib/openai/models/audio/translation_create_params.rbi @@ -80,7 +80,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::AudioModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::AudioModel::TaggedSymbol]) } def self.variants; end end @@ -91,8 +91,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } JSON = T.let(:json, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) TEXT = T.let(:text, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio_model.rbi b/rbi/lib/openai/models/audio_model.rbi index ab50d39a..e82aa420 100644 --- a/rbi/lib/openai/models/audio_model.rbi +++ b/rbi/lib/openai/models/audio_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend 
OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } WHISPER_1 = T.let(:"whisper-1", OpenAI::Models::AudioModel::TaggedSymbol) GPT_4O_TRANSCRIBE = T.let(:"gpt-4o-transcribe", OpenAI::Models::AudioModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/audio_response_format.rbi b/rbi/lib/openai/models/audio_response_format.rbi index 23a709ab..b7fca47d 100644 --- a/rbi/lib/openai/models/audio_response_format.rbi +++ b/rbi/lib/openai/models/audio_response_format.rbi @@ -9,7 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::AudioResponseFormat) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::AudioResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } JSON = T.let(:json, OpenAI::Models::AudioResponseFormat::TaggedSymbol) TEXT = T.let(:text, OpenAI::Models::AudioResponseFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/batch.rbi b/rbi/lib/openai/models/batch.rbi index 7542c9b1..8de778c2 100644 --- a/rbi/lib/openai/models/batch.rbi +++ b/rbi/lib/openai/models/batch.rbi @@ -203,7 +203,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Batch::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Batch::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } VALIDATING = T.let(:validating, OpenAI::Models::Batch::Status::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::Batch::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/batch_create_params.rbi b/rbi/lib/openai/models/batch_create_params.rbi index 6f907cae..3a795500 100644 --- a/rbi/lib/openai/models/batch_create_params.rbi +++ b/rbi/lib/openai/models/batch_create_params.rbi @@ -71,8 +71,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETION_WINDOW_24H = T.let(:"24h", OpenAI::Models::BatchCreateParams::CompletionWindow::TaggedSymbol) @@ -88,8 +87,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::BatchCreateParams::Endpoint) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } V1_RESPONSES = T.let(:"/v1/responses", OpenAI::Models::BatchCreateParams::Endpoint::TaggedSymbol) V1_CHAT_COMPLETIONS = diff --git a/rbi/lib/openai/models/beta/assistant_create_params.rbi b/rbi/lib/openai/models/beta/assistant_create_params.rbi index b0c7ab98..f068ee9b 100644 --- a/rbi/lib/openai/models/beta/assistant_create_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_create_params.rbi @@ -230,7 +230,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end diff --git a/rbi/lib/openai/models/beta/assistant_list_params.rbi b/rbi/lib/openai/models/beta/assistant_list_params.rbi index 
52cd7565..48df52cc 100644 --- a/rbi/lib/openai/models/beta/assistant_list_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_list_params.rbi @@ -75,8 +75,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::AssistantListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi index 4b2436cd..3ef8c526 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice.rbi @@ -41,8 +41,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FUNCTION = T.let(:function, OpenAI::Models::Beta::AssistantToolChoice::Type::TaggedSymbol) CODE_INTERPRETER = diff --git a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi index ce3df8b3..51538b5e 100644 --- a/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi +++ b/rbi/lib/openai/models/beta/assistant_tool_choice_option.rbi @@ -21,8 +21,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol) @@ -35,7 +34,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Beta::AssistantToolChoice] + [OpenAI::Models::Beta::AssistantToolChoiceOption::Auto::TaggedSymbol, OpenAI::Models::Beta::AssistantToolChoice] ) end def self.variants; end diff --git a/rbi/lib/openai/models/beta/assistant_update_params.rbi b/rbi/lib/openai/models/beta/assistant_update_params.rbi index 01cbc60a..8e100d6d 100644 --- a/rbi/lib/openai/models/beta/assistant_update_params.rbi +++ b/rbi/lib/openai/models/beta/assistant_update_params.rbi @@ -233,12 +233,11 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::Beta::AssistantUpdateParams::Model::TaggedSymbol) diff 
--git a/rbi/lib/openai/models/beta/file_search_tool.rbi b/rbi/lib/openai/models/beta/file_search_tool.rbi index c3df4c59..ad127d97 100644 --- a/rbi/lib/openai/models/beta/file_search_tool.rbi +++ b/rbi/lib/openai/models/beta/file_search_tool.rbi @@ -127,14 +127,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi index bc0633ec..091a1cab 100644 --- a/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_and_run_params.rbi @@ -300,7 +300,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end @@ -499,8 +499,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role::TaggedSymbol) ASSISTANT = @@ -1075,14 +1074,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/thread_create_params.rbi b/rbi/lib/openai/models/beta/thread_create_params.rbi index 9d73e499..6d1ed043 100644 --- a/rbi/lib/openai/models/beta/thread_create_params.rbi +++ b/rbi/lib/openai/models/beta/thread_create_params.rbi @@ -191,8 +191,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::ThreadCreateParams::Message::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_file.rbi b/rbi/lib/openai/models/beta/threads/image_file.rbi index 788ca5de..c6c13ccf 100644 --- a/rbi/lib/openai/models/beta/threads/image_file.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file.rbi @@ -34,8 +34,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFile::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi index d0d866ed..c677a276 100644 --- a/rbi/lib/openai/models/beta/threads/image_file_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_file_delta.rbi @@ -40,8 +40,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_url.rbi b/rbi/lib/openai/models/beta/threads/image_url.rbi index 27f59579..b98e88e4 100644 --- a/rbi/lib/openai/models/beta/threads/image_url.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url.rbi @@ -33,8 +33,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURL::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi index 69c06976..a9c81f3e 100644 --- a/rbi/lib/openai/models/beta/threads/image_url_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/image_url_delta.rbi @@ -38,8 +38,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message.rbi b/rbi/lib/openai/models/beta/threads/message.rbi index ae809809..eef51a4c 100644 --- a/rbi/lib/openai/models/beta/threads/message.rbi +++ b/rbi/lib/openai/models/beta/threads/message.rbi @@ -282,8 +282,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } CONTENT_FILTER = T.let(:content_filter, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason::TaggedSymbol) @@ -308,8 +307,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::Message::Role::TaggedSymbol) @@ -324,8 +322,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Message::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) INCOMPLETE = T.let(:incomplete, OpenAI::Models::Beta::Threads::Message::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_create_params.rbi b/rbi/lib/openai/models/beta/threads/message_create_params.rbi index 94ce2228..6c57ca1f 100644 --- a/rbi/lib/openai/models/beta/threads/message_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_create_params.rbi @@ -132,8 +132,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageCreateParams::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_delta.rbi b/rbi/lib/openai/models/beta/threads/message_delta.rbi index cce9e935..6156e86b 100644 --- a/rbi/lib/openai/models/beta/threads/message_delta.rbi +++ b/rbi/lib/openai/models/beta/threads/message_delta.rbi @@ -86,8 +86,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Beta::Threads::MessageDelta::Role::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/message_list_params.rbi b/rbi/lib/openai/models/beta/threads/message_list_params.rbi index 11ea50e8..945d30d3 100644 --- a/rbi/lib/openai/models/beta/threads/message_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/message_list_params.rbi @@ -85,8 +85,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::MessageListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run.rbi b/rbi/lib/openai/models/beta/threads/run.rbi index fd1569df..f0708be8 100644 --- a/rbi/lib/openai/models/beta/threads/run.rbi +++ b/rbi/lib/openai/models/beta/threads/run.rbi @@ -392,8 +392,7 @@ module OpenAI TaggedSymbol = 
T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MAX_COMPLETION_TOKENS = T.let(:max_completion_tokens, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason::TaggedSymbol) @@ -432,8 +431,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Run::LastError::Code::TaggedSymbol) RATE_LIMIT_EXCEEDED = @@ -545,8 +543,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type::TaggedSymbol) LAST_MESSAGES = diff --git a/rbi/lib/openai/models/beta/threads/run_create_params.rbi b/rbi/lib/openai/models/beta/threads/run_create_params.rbi index bfa2e42d..3d4cc20a 100644 --- a/rbi/lib/openai/models/beta/threads/run_create_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_create_params.rbi @@ -442,14 +442,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role::TaggedSymbol) @@ -564,7 +557,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end @@ -612,14 +605,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run_list_params.rbi b/rbi/lib/openai/models/beta/threads/run_list_params.rbi index 0d5a35b2..d38cd84b 100644 --- a/rbi/lib/openai/models/beta/threads/run_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/run_list_params.rbi @@ -76,8 +76,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) DESC = T.let(:desc, 
OpenAI::Models::Beta::Threads::RunListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/run_status.rbi b/rbi/lib/openai/models/beta/threads/run_status.rbi index b9c1490b..7eb4d991 100644 --- a/rbi/lib/openai/models/beta/threads/run_status.rbi +++ b/rbi/lib/openai/models/beta/threads/run_status.rbi @@ -11,7 +11,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::RunStatus) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } QUEUED = T.let(:queued, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::RunStatus::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi index b11ca61e..77bfcf06 100644 --- a/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/file_search_tool_call.rbi @@ -150,14 +150,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let( @@ -298,14 +291,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let( diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi index 7519ba3f..db09f851 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step.rbi @@ -209,8 +209,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code::TaggedSymbol) @@ -228,8 +227,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) CANCELLED = T.let(:cancelled, OpenAI::Models::Beta::Threads::Runs::RunStep::Status::TaggedSymbol) @@ -259,8 +257,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, 
String) } MESSAGE_CREATION = T.let(:message_creation, OpenAI::Models::Beta::Threads::Runs::RunStep::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi index 2261f9ce..1632e2a6 100644 --- a/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/run_step_include.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::RunStepInclude) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::RunStepInclude::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = T.let( diff --git a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi index abba06e2..9f8536b3 100644 --- a/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi +++ b/rbi/lib/openai/models/beta/threads/runs/step_list_params.rbi @@ -106,8 +106,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion.rbi b/rbi/lib/openai/models/chat/chat_completion.rbi index b6ba1367..43d751bd 100644 --- a/rbi/lib/openai/models/chat/chat_completion.rbi +++ b/rbi/lib/openai/models/chat/chat_completion.rbi @@ -171,8 +171,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason::TaggedSymbol) @@ -239,8 +238,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletion::ServiceTier::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi index f7bff346..231ca7da 100644 --- a/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_audio_param.rbi @@ -43,8 +43,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) AAC = T.let(:aac, OpenAI::Models::Chat::ChatCompletionAudioParam::Format::TaggedSymbol) @@ -62,12 +61,11 @@ module OpenAI module Voice extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ALLOY = T.let(:alloy, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) ASH = T.let(:ash, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi index abee2167..8c755f8f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_chunk.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_chunk.rbi @@ -272,8 +272,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role::TaggedSymbol) @@ -377,14 +376,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } FUNCTION = T.let(:function, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type::TaggedSymbol) @@ -409,8 +401,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STOP = T.let(:stop, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason::TaggedSymbol) @@ -478,8 +469,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi index e7e3a922..e28956c2 100644 --- 
a/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_image.rbi @@ -71,14 +71,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi index d365d7c9..9e55ccdf 100644 --- a/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_content_part_input_audio.rbi @@ -72,14 +72,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } WAV = T.let(:wav, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_modality.rbi b/rbi/lib/openai/models/chat/chat_completion_modality.rbi index 34114b90..0226b92f 100644 --- a/rbi/lib/openai/models/chat/chat_completion_modality.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_modality.rbi @@ -7,8 +7,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionModality) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::ChatCompletionModality::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_role.rbi b/rbi/lib/openai/models/chat/chat_completion_role.rbi index aa6fa947..9be9e3e1 100644 --- a/rbi/lib/openai/models/chat/chat_completion_role.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_role.rbi @@ -8,7 +8,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionRole) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } DEVELOPER = T.let(:developer, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Chat::ChatCompletionRole::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi index b7979106..fbdb6fda 100644 --- a/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi +++ b/rbi/lib/openai/models/chat/chat_completion_tool_choice_option.rbi @@ -22,8 +22,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto) } 
- OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol) @@ -36,7 +35,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::OrSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + [OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto::TaggedSymbol, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] ) end def self.variants; end diff --git a/rbi/lib/openai/models/chat/completion_create_params.rbi b/rbi/lib/openai/models/chat/completion_create_params.rbi index 9e8b8bd5..6e3970fc 100644 --- a/rbi/lib/openai/models/chat/completion_create_params.rbi +++ b/rbi/lib/openai/models/chat/completion_create_params.rbi @@ -539,7 +539,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ChatModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ChatModel::TaggedSymbol]) } def self.variants; end end @@ -568,14 +568,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol) @@ -594,7 +587,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::OrSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + [OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode::TaggedSymbol, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] ) end def self.variants; end @@ -643,8 +636,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::Modality) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) AUDIO = T.let(:audio, OpenAI::Models::Chat::CompletionCreateParams::Modality::TaggedSymbol) @@ -696,8 +688,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier::TaggedSymbol) @@ -790,14 +781,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize::TaggedSymbol - ) - end + OrSymbol = 
T.type_alias { T.any(Symbol, String) } LOW = T.let( diff --git a/rbi/lib/openai/models/chat/completion_list_params.rbi b/rbi/lib/openai/models/chat/completion_list_params.rbi index eec03610..f83e4b89 100644 --- a/rbi/lib/openai/models/chat/completion_list_params.rbi +++ b/rbi/lib/openai/models/chat/completion_list_params.rbi @@ -76,8 +76,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::CompletionListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Chat::CompletionListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat/completions/message_list_params.rbi b/rbi/lib/openai/models/chat/completions/message_list_params.rbi index 8a39c287..a661288c 100644 --- a/rbi/lib/openai/models/chat/completions/message_list_params.rbi +++ b/rbi/lib/openai/models/chat/completions/message_list_params.rbi @@ -61,8 +61,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Chat::Completions::MessageListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/chat_model.rbi b/rbi/lib/openai/models/chat_model.rbi index 147d44a6..f4c20501 100644 --- a/rbi/lib/openai/models/chat_model.rbi +++ b/rbi/lib/openai/models/chat_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ChatModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } GPT_4_1 = T.let(:"gpt-4.1", OpenAI::Models::ChatModel::TaggedSymbol) GPT_4_1_MINI = T.let(:"gpt-4.1-mini", OpenAI::Models::ChatModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/comparison_filter.rbi b/rbi/lib/openai/models/comparison_filter.rbi index 15d111b8..19f90169 100644 --- a/rbi/lib/openai/models/comparison_filter.rbi +++ b/rbi/lib/openai/models/comparison_filter.rbi @@ -55,7 +55,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ComparisonFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } EQ = T.let(:eq, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) NE = T.let(:ne, OpenAI::Models::ComparisonFilter::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/completion_choice.rbi b/rbi/lib/openai/models/completion_choice.rbi index 375563ef..4731b8da 100644 --- a/rbi/lib/openai/models/completion_choice.rbi +++ b/rbi/lib/openai/models/completion_choice.rbi @@ -57,8 +57,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionChoice::FinishReason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STOP = 
T.let(:stop, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) LENGTH = T.let(:length, OpenAI::Models::CompletionChoice::FinishReason::TaggedSymbol) diff --git a/rbi/lib/openai/models/completion_create_params.rbi b/rbi/lib/openai/models/completion_create_params.rbi index d3267415..a0956081 100644 --- a/rbi/lib/openai/models/completion_create_params.rbi +++ b/rbi/lib/openai/models/completion_create_params.rbi @@ -243,12 +243,11 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompletionCreateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } GPT_3_5_TURBO_INSTRUCT = T.let(:"gpt-3.5-turbo-instruct", OpenAI::Models::CompletionCreateParams::Model::TaggedSymbol) diff --git a/rbi/lib/openai/models/compound_filter.rbi b/rbi/lib/openai/models/compound_filter.rbi index 5eee5410..4bb4c09c 100644 --- a/rbi/lib/openai/models/compound_filter.rbi +++ b/rbi/lib/openai/models/compound_filter.rbi @@ -47,7 +47,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::CompoundFilter::Type) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AND = T.let(:and, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) OR = T.let(:or, OpenAI::Models::CompoundFilter::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/embedding_create_params.rbi b/rbi/lib/openai/models/embedding_create_params.rbi index 0869c887..b19f81a0 100644 --- a/rbi/lib/openai/models/embedding_create_params.rbi +++ b/rbi/lib/openai/models/embedding_create_params.rbi @@ -111,7 +111,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::EmbeddingModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::EmbeddingModel::TaggedSymbol]) } def self.variants; end end @@ -121,8 +121,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FLOAT = T.let(:float, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) BASE64 = T.let(:base64, OpenAI::Models::EmbeddingCreateParams::EncodingFormat::TaggedSymbol) diff --git a/rbi/lib/openai/models/embedding_model.rbi b/rbi/lib/openai/models/embedding_model.rbi index 962b8546..bbaaae0a 100644 --- a/rbi/lib/openai/models/embedding_model.rbi +++ b/rbi/lib/openai/models/embedding_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EmbeddingModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EmbeddingModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT_EMBEDDING_ADA_002 = T.let(:"text-embedding-ada-002", OpenAI::Models::EmbeddingModel::TaggedSymbol) TEXT_EMBEDDING_3_SMALL = T.let(:"text-embedding-3-small", 
OpenAI::Models::EmbeddingModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_create_params.rbi b/rbi/lib/openai/models/eval_create_params.rbi index c35b54fa..dbec61df 100644 --- a/rbi/lib/openai/models/eval_create_params.rbi +++ b/rbi/lib/openai/models/eval_create_params.rbi @@ -382,14 +382,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } INPUT_TEXT = T.let( @@ -415,14 +408,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( @@ -455,14 +441,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( @@ -577,14 +556,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } OUTPUT_TEXT = T.let( @@ -610,14 +582,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANT = T.let( @@ -640,14 +605,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( diff --git a/rbi/lib/openai/models/eval_label_model_grader.rbi b/rbi/lib/openai/models/eval_label_model_grader.rbi index af492766..40ab0320 100644 --- a/rbi/lib/openai/models/eval_label_model_grader.rbi +++ b/rbi/lib/openai/models/eval_label_model_grader.rbi @@ -154,14 +154,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } INPUT_TEXT = T.let(:input_text, 
OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type::TaggedSymbol) @@ -180,8 +173,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role::TaggedSymbol) @@ -198,8 +190,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type::TaggedSymbol) @@ -282,8 +273,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } OUTPUT_TEXT = T.let(:output_text, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type::TaggedSymbol) @@ -302,8 +292,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_list_params.rbi b/rbi/lib/openai/models/eval_list_params.rbi index bb813db2..e0e69e2a 100644 --- a/rbi/lib/openai/models/eval_list_params.rbi +++ b/rbi/lib/openai/models/eval_list_params.rbi @@ -68,7 +68,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::EvalListParams::Order::TaggedSymbol) @@ -83,7 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalListParams::OrderBy) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } CREATED_AT = T.let(:created_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) UPDATED_AT = T.let(:updated_at, OpenAI::Models::EvalListParams::OrderBy::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_string_check_grader.rbi b/rbi/lib/openai/models/eval_string_check_grader.rbi index 6a7e26c6..a7c9af35 100644 --- a/rbi/lib/openai/models/eval_string_check_grader.rbi +++ b/rbi/lib/openai/models/eval_string_check_grader.rbi @@ -56,8 +56,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalStringCheckGrader::Operation) } - OrSymbol = 
- T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } EQ = T.let(:eq, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) NE = T.let(:ne, OpenAI::Models::EvalStringCheckGrader::Operation::TaggedSymbol) diff --git a/rbi/lib/openai/models/eval_text_similarity_grader.rbi b/rbi/lib/openai/models/eval_text_similarity_grader.rbi index 13ac3bcc..460cab97 100644 --- a/rbi/lib/openai/models/eval_text_similarity_grader.rbi +++ b/rbi/lib/openai/models/eval_text_similarity_grader.rbi @@ -74,8 +74,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FUZZY_MATCH = T.let(:fuzzy_match, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric::TaggedSymbol) diff --git a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi index b8977938..83b73c77 100644 --- a/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi +++ b/rbi/lib/openai/models/evals/create_eval_completions_run_data_source.rbi @@ -270,14 +270,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } INPUT_TEXT = T.let( @@ -305,14 +298,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let( @@ -349,14 +335,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( @@ -477,14 +456,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } OUTPUT_TEXT = T.let( @@ -512,14 +484,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANT = T.let( @@ -546,14 +511,7 @@ module OpenAI T.type_alias do T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type) end - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let( @@ -770,8 +728,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETIONS = T.let(:completions, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/evals/run_list_params.rbi b/rbi/lib/openai/models/evals/run_list_params.rbi index cd16d49f..5e68c583 100644 --- a/rbi/lib/openai/models/evals/run_list_params.rbi +++ b/rbi/lib/openai/models/evals/run_list_params.rbi @@ -69,8 +69,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Evals::RunListParams::Order::TaggedSymbol) @@ -85,8 +84,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::RunListParams::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } QUEUED = T.let(:queued, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Evals::RunListParams::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi index cd6a0ed0..1d9f802f 100644 --- a/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi +++ b/rbi/lib/openai/models/evals/runs/output_item_list_params.rbi @@ -75,8 +75,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Evals::Runs::OutputItemListParams::Order::TaggedSymbol) @@ -91,8 +90,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FAIL = T.let(:fail, 
OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) PASS = T.let(:pass, OpenAI::Models::Evals::Runs::OutputItemListParams::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_list_params.rbi b/rbi/lib/openai/models/file_list_params.rbi index daad5aea..69d9b1a3 100644 --- a/rbi/lib/openai/models/file_list_params.rbi +++ b/rbi/lib/openai/models/file_list_params.rbi @@ -71,7 +71,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileListParams::Order) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::FileListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_object.rbi b/rbi/lib/openai/models/file_object.rbi index 15e6c24d..82c4880a 100644 --- a/rbi/lib/openai/models/file_object.rbi +++ b/rbi/lib/openai/models/file_object.rbi @@ -100,7 +100,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Purpose) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Purpose::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANTS = T.let(:assistants, OpenAI::Models::FileObject::Purpose::TaggedSymbol) ASSISTANTS_OUTPUT = T.let(:assistants_output, OpenAI::Models::FileObject::Purpose::TaggedSymbol) @@ -120,7 +120,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FileObject::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FileObject::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } UPLOADED = T.let(:uploaded, OpenAI::Models::FileObject::Status::TaggedSymbol) PROCESSED = T.let(:processed, OpenAI::Models::FileObject::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/file_purpose.rbi b/rbi/lib/openai/models/file_purpose.rbi index 274edce7..7d3a2d58 100644 --- a/rbi/lib/openai/models/file_purpose.rbi +++ b/rbi/lib/openai/models/file_purpose.rbi @@ -10,7 +10,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FilePurpose) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::FilePurpose::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASSISTANTS = T.let(:assistants, OpenAI::Models::FilePurpose::TaggedSymbol) BATCH = T.let(:batch, OpenAI::Models::FilePurpose::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi index b3acb44f..938c8a10 100644 --- a/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rbi @@ -68,14 +68,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } ASCENDING = T.let(:ascending, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order::TaggedSymbol) diff --git 
a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi index b12fca64..6e25d748 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job.rbi @@ -299,8 +299,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } VALIDATING_FILES = T.let(:validating_files, OpenAI::Models::FineTuning::FineTuningJob::Status::TaggedSymbol) @@ -604,8 +603,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::FineTuningJob::Method::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi index 85a3774e..ea515e0f 100644 --- a/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi +++ b/rbi/lib/openai/models/fine_tuning/fine_tuning_job_event.rbi @@ -83,8 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } INFO = T.let(:info, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) WARN = T.let(:warn, OpenAI::Models::FineTuning::FineTuningJobEvent::Level::TaggedSymbol) @@ -99,8 +98,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) METRICS = T.let(:metrics, OpenAI::Models::FineTuning::FineTuningJobEvent::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi index 4acc7f38..13b9cfae 100644 --- a/rbi/lib/openai/models/fine_tuning/job_create_params.rbi +++ b/rbi/lib/openai/models/fine_tuning/job_create_params.rbi @@ -149,12 +149,11 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol]) } def self.variants; end TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } BABBAGE_002 = T.let(:"babbage-002", 
OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) DAVINCI_002 = T.let(:"davinci-002", OpenAI::Models::FineTuning::JobCreateParams::Model::TaggedSymbol) @@ -626,8 +625,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SUPERVISED = T.let(:supervised, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) DPO = T.let(:dpo, OpenAI::Models::FineTuning::JobCreateParams::Method::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_create_variation_params.rbi b/rbi/lib/openai/models/image_create_variation_params.rbi index 580de965..44cd2758 100644 --- a/rbi/lib/openai/models/image_create_variation_params.rbi +++ b/rbi/lib/openai/models/image_create_variation_params.rbi @@ -85,7 +85,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } def self.variants; end end @@ -96,8 +96,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } URL = T.let(:url, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageCreateVariationParams::ResponseFormat::TaggedSymbol) @@ -112,8 +111,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageCreateVariationParams::Size) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageCreateVariationParams::Size::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_edit_params.rbi b/rbi/lib/openai/models/image_edit_params.rbi index 4e7a68bf..616662cf 100644 --- a/rbi/lib/openai/models/image_edit_params.rbi +++ b/rbi/lib/openai/models/image_edit_params.rbi @@ -102,7 +102,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } def self.variants; end end @@ -113,8 +113,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } URL = T.let(:url, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageEditParams::ResponseFormat::TaggedSymbol) @@ -129,7 +128,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageEditParams::Size) } - OrSymbol = T.type_alias { T.any(Symbol, String, 
OpenAI::Models::ImageEditParams::Size::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageEditParams::Size::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_generate_params.rbi b/rbi/lib/openai/models/image_generate_params.rbi index c10c716c..d35fb1e2 100644 --- a/rbi/lib/openai/models/image_generate_params.rbi +++ b/rbi/lib/openai/models/image_generate_params.rbi @@ -104,7 +104,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ImageModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ImageModel::TaggedSymbol]) } def self.variants; end end @@ -115,8 +115,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Quality) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } STANDARD = T.let(:standard, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) HD = T.let(:hd, OpenAI::Models::ImageGenerateParams::Quality::TaggedSymbol) @@ -132,8 +131,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } URL = T.let(:url, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) B64_JSON = T.let(:b64_json, OpenAI::Models::ImageGenerateParams::ResponseFormat::TaggedSymbol) @@ -149,8 +147,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Size) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SIZE_256X256 = T.let(:"256x256", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) SIZE_512X512 = T.let(:"512x512", OpenAI::Models::ImageGenerateParams::Size::TaggedSymbol) @@ -170,8 +167,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageGenerateParams::Style) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } VIVID = T.let(:vivid, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) NATURAL = T.let(:natural, OpenAI::Models::ImageGenerateParams::Style::TaggedSymbol) diff --git a/rbi/lib/openai/models/image_model.rbi b/rbi/lib/openai/models/image_model.rbi index 0203cf30..2dd50c7c 100644 --- a/rbi/lib/openai/models/image_model.rbi +++ b/rbi/lib/openai/models/image_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ImageModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ImageModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } DALL_E_2 = T.let(:"dall-e-2", OpenAI::Models::ImageModel::TaggedSymbol) DALL_E_3 = T.let(:"dall-e-3", OpenAI::Models::ImageModel::TaggedSymbol) diff --git a/rbi/lib/openai/models/moderation.rbi b/rbi/lib/openai/models/moderation.rbi index 03bc7cb0..7c63a9f8 
100644 --- a/rbi/lib/openai/models/moderation.rbi +++ b/rbi/lib/openai/models/moderation.rbi @@ -306,8 +306,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Harassment::TaggedSymbol) @@ -322,14 +321,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HarassmentThreatening::TaggedSymbol) @@ -348,8 +340,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Hate::TaggedSymbol) @@ -362,14 +353,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::HateThreatening::TaggedSymbol) @@ -385,8 +369,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Illicit::TaggedSymbol) @@ -399,8 +382,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::IllicitViolent::TaggedSymbol) @@ -416,8 +398,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarm::TaggedSymbol) @@ -431,14 +412,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - 
OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmInstruction::TaggedSymbol) @@ -459,8 +433,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SelfHarmIntent::TaggedSymbol) IMAGE = @@ -478,8 +451,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Sexual::TaggedSymbol) @@ -493,8 +465,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::SexualMinor::TaggedSymbol) @@ -510,8 +481,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) IMAGE = T.let(:image, OpenAI::Models::Moderation::CategoryAppliedInputTypes::Violence::TaggedSymbol) @@ -525,14 +495,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic::TaggedSymbol) IMAGE = diff --git a/rbi/lib/openai/models/moderation_create_params.rbi b/rbi/lib/openai/models/moderation_create_params.rbi index 1e4e920e..707f700c 100644 --- a/rbi/lib/openai/models/moderation_create_params.rbi +++ b/rbi/lib/openai/models/moderation_create_params.rbi @@ -94,7 +94,7 @@ module OpenAI module Model extend OpenAI::Internal::Type::Union - sig { override.returns([String, OpenAI::Models::ModerationModel::OrSymbol]) } + sig { override.returns([String, OpenAI::Models::ModerationModel::TaggedSymbol]) } def self.variants; end end end diff --git a/rbi/lib/openai/models/moderation_model.rbi b/rbi/lib/openai/models/moderation_model.rbi index 7de0fc9c..36b6b843 100644 --- a/rbi/lib/openai/models/moderation_model.rbi +++ b/rbi/lib/openai/models/moderation_model.rbi @@ -6,7 +6,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { 
T.all(Symbol, OpenAI::Models::ModerationModel) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ModerationModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } OMNI_MODERATION_LATEST = T.let(:"omni-moderation-latest", OpenAI::Models::ModerationModel::TaggedSymbol) OMNI_MODERATION_2024_09_26 = diff --git a/rbi/lib/openai/models/reasoning.rbi b/rbi/lib/openai/models/reasoning.rbi index b60585b1..1223bf2d 100644 --- a/rbi/lib/openai/models/reasoning.rbi +++ b/rbi/lib/openai/models/reasoning.rbi @@ -61,8 +61,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::GenerateSummary) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) CONCISE = T.let(:concise, OpenAI::Models::Reasoning::GenerateSummary::TaggedSymbol) @@ -79,7 +78,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Reasoning::Summary) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Reasoning::Summary::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Reasoning::Summary::TaggedSymbol) CONCISE = T.let(:concise, OpenAI::Models::Reasoning::Summary::TaggedSymbol) diff --git a/rbi/lib/openai/models/reasoning_effort.rbi b/rbi/lib/openai/models/reasoning_effort.rbi index 1459f48b..11b95ba9 100644 --- a/rbi/lib/openai/models/reasoning_effort.rbi +++ b/rbi/lib/openai/models/reasoning_effort.rbi @@ -12,7 +12,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ReasoningEffort) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::ReasoningEffort::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } LOW = T.let(:low, OpenAI::Models::ReasoningEffort::TaggedSymbol) MEDIUM = T.let(:medium, OpenAI::Models::ReasoningEffort::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/computer_tool.rbi b/rbi/lib/openai/models/responses/computer_tool.rbi index c20479e3..50bedd4b 100644 --- a/rbi/lib/openai/models/responses/computer_tool.rbi +++ b/rbi/lib/openai/models/responses/computer_tool.rbi @@ -51,8 +51,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ComputerTool::Environment) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MAC = T.let(:mac, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) WINDOWS = T.let(:windows, OpenAI::Models::Responses::ComputerTool::Environment::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/easy_input_message.rbi b/rbi/lib/openai/models/responses/easy_input_message.rbi index 6f2f6316..8efd6fd4 100644 --- a/rbi/lib/openai/models/responses/easy_input_message.rbi +++ b/rbi/lib/openai/models/responses/easy_input_message.rbi @@ -109,8 +109,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, 
OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) ASSISTANT = T.let(:assistant, OpenAI::Models::Responses::EasyInputMessage::Role::TaggedSymbol) @@ -126,8 +125,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::EasyInputMessage::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::Responses::EasyInputMessage::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/file_search_tool.rbi b/rbi/lib/openai/models/responses/file_search_tool.rbi index b16985a4..6d579cea 100644 --- a/rbi/lib/openai/models/responses/file_search_tool.rbi +++ b/rbi/lib/openai/models/responses/file_search_tool.rbi @@ -129,8 +129,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker::TaggedSymbol) DEFAULT_2024_11_15 = diff --git a/rbi/lib/openai/models/responses/input_item_list_params.rbi b/rbi/lib/openai/models/responses/input_item_list_params.rbi index 302e530f..9699e037 100644 --- a/rbi/lib/openai/models/responses/input_item_list_params.rbi +++ b/rbi/lib/openai/models/responses/input_item_list_params.rbi @@ -83,8 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::InputItemListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::Responses::InputItemListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response.rbi b/rbi/lib/openai/models/responses/response.rbi index 833e1023..5e5dacba 100644 --- a/rbi/lib/openai/models/responses/response.rbi +++ b/rbi/lib/openai/models/responses/response.rbi @@ -401,8 +401,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MAX_OUTPUT_TOKENS = T.let(:max_output_tokens, OpenAI::Models::Responses::Response::IncompleteDetails::Reason::TaggedSymbol) @@ -450,8 +449,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Responses::Response::ServiceTier::TaggedSymbol) @@ -472,8 +470,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::Response::Truncation) } - OrSymbol = - T.type_alias { T.any(Symbol, String, 
OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) DISABLED = T.let(:disabled, OpenAI::Models::Responses::Response::Truncation::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi index 4f1a0e10..9c9e05b1 100644 --- a/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_code_interpreter_tool_call.rbi @@ -158,8 +158,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi index ba86d36e..d899a4bc 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call.rbi @@ -150,14 +150,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } LEFT = T.let(:left, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button::TaggedSymbol) @@ -415,8 +408,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseComputerToolCall::Status::TaggedSymbol) @@ -433,8 +425,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPUTER_CALL = T.let(:computer_call, OpenAI::Models::Responses::ResponseComputerToolCall::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi index 64b2dc16..cbf376cc 100644 --- a/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_computer_tool_call_output_item.rbi @@ -129,8 +129,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, 
OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_create_params.rbi b/rbi/lib/openai/models/responses/response_create_params.rbi index 68125cc2..64664281 100644 --- a/rbi/lib/openai/models/responses/response_create_params.rbi +++ b/rbi/lib/openai/models/responses/response_create_params.rbi @@ -455,8 +455,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) DEFAULT = T.let(:default, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier::TaggedSymbol) @@ -475,7 +474,7 @@ module OpenAI sig do override .returns( - [OpenAI::Models::Responses::ToolChoiceOptions::OrSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + [OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] ) end def self.variants; end @@ -493,8 +492,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) DISABLED = T.let(:disabled, OpenAI::Models::Responses::ResponseCreateParams::Truncation::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_error.rbi b/rbi/lib/openai/models/responses/response_error.rbi index 843f8868..3beca9b7 100644 --- a/rbi/lib/openai/models/responses/response_error.rbi +++ b/rbi/lib/openai/models/responses/response_error.rbi @@ -27,8 +27,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::Responses::ResponseError::Code::TaggedSymbol) RATE_LIMIT_EXCEEDED = diff --git a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi index 10068e96..edeca91a 100644 --- a/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi +++ b/rbi/lib/openai/models/responses/response_file_search_tool_call.rbi @@ -63,8 +63,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call.rbi b/rbi/lib/openai/models/responses/response_function_tool_call.rbi index 769270c2..5f89a8cf 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call.rbi +++ 
b/rbi/lib/openai/models/responses/response_function_tool_call.rbi @@ -73,8 +73,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCall::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi index 8f224c19..df902df8 100644 --- a/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi +++ b/rbi/lib/openai/models/responses/response_function_tool_call_output_item.rbi @@ -61,8 +61,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_function_web_search.rbi b/rbi/lib/openai/models/responses/response_function_web_search.rbi index 5bd85d43..4c9bd2bd 100644 --- a/rbi/lib/openai/models/responses/response_function_web_search.rbi +++ b/rbi/lib/openai/models/responses/response_function_web_search.rbi @@ -43,8 +43,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_includable.rbi b/rbi/lib/openai/models/responses/response_includable.rbi index dd8aebb9..a6d5df86 100644 --- a/rbi/lib/openai/models/responses/response_includable.rbi +++ b/rbi/lib/openai/models/responses/response_includable.rbi @@ -15,8 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseIncludable) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FILE_SEARCH_CALL_RESULTS = T.let(:"file_search_call.results", OpenAI::Models::Responses::ResponseIncludable::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_audio.rbi b/rbi/lib/openai/models/responses/response_input_audio.rbi index b4a5969e..ff2d0075 100644 --- a/rbi/lib/openai/models/responses/response_input_audio.rbi +++ b/rbi/lib/openai/models/responses/response_input_audio.rbi @@ -40,8 +40,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MP3 = T.let(:mp3, OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) WAV = T.let(:wav, 
OpenAI::Models::Responses::ResponseInputAudio::Format::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_image.rbi b/rbi/lib/openai/models/responses/response_input_image.rbi index f6f84eac..2cf0bfec 100644 --- a/rbi/lib/openai/models/responses/response_input_image.rbi +++ b/rbi/lib/openai/models/responses/response_input_image.rbi @@ -54,8 +54,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } HIGH = T.let(:high, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) LOW = T.let(:low, OpenAI::Models::Responses::ResponseInputImage::Detail::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_input_item.rbi b/rbi/lib/openai/models/responses/response_input_item.rbi index cbcc9069..7187822f 100644 --- a/rbi/lib/openai/models/responses/response_input_item.rbi +++ b/rbi/lib/openai/models/responses/response_input_item.rbi @@ -92,8 +92,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputItem::Message::Role::TaggedSymbol) @@ -110,8 +109,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseInputItem::Message::Status::TaggedSymbol) @@ -130,8 +128,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputItem::Message::Type::TaggedSymbol) @@ -270,14 +267,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let( @@ -360,14 +350,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status) } - OrSymbol = - T.type_alias do - T.any( - Symbol, - String, - OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status::TaggedSymbol - ) - end + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let( diff --git a/rbi/lib/openai/models/responses/response_input_message_item.rbi b/rbi/lib/openai/models/responses/response_input_message_item.rbi index 1eeebaa4..dbae6d9b 100644 --- a/rbi/lib/openai/models/responses/response_input_message_item.rbi 
+++ b/rbi/lib/openai/models/responses/response_input_message_item.rbi @@ -86,8 +86,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } USER = T.let(:user, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) SYSTEM = T.let(:system, OpenAI::Models::Responses::ResponseInputMessageItem::Role::TaggedSymbol) @@ -104,8 +103,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseInputMessageItem::Status::TaggedSymbol) @@ -122,8 +120,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } MESSAGE = T.let(:message, OpenAI::Models::Responses::ResponseInputMessageItem::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_output_message.rbi b/rbi/lib/openai/models/responses/response_output_message.rbi index d7a588ea..e9c25f07 100644 --- a/rbi/lib/openai/models/responses/response_output_message.rbi +++ b/rbi/lib/openai/models/responses/response_output_message.rbi @@ -79,8 +79,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseOutputMessage::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_reasoning_item.rbi b/rbi/lib/openai/models/responses/response_reasoning_item.rbi index 9d308e35..ecd2a673 100644 --- a/rbi/lib/openai/models/responses/response_reasoning_item.rbi +++ b/rbi/lib/openai/models/responses/response_reasoning_item.rbi @@ -72,8 +72,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseReasoningItem::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/response_status.rbi b/rbi/lib/openai/models/responses/response_status.rbi index a887be4c..7fb2ba80 100644 --- a/rbi/lib/openai/models/responses/response_status.rbi +++ b/rbi/lib/openai/models/responses/response_status.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum 
TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ResponseStatus) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } COMPLETED = T.let(:completed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) FAILED = T.let(:failed, OpenAI::Models::Responses::ResponseStatus::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/tool_choice_options.rbi b/rbi/lib/openai/models/responses/tool_choice_options.rbi index ccc45284..793acf3c 100644 --- a/rbi/lib/openai/models/responses/tool_choice_options.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_options.rbi @@ -15,8 +15,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceOptions) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } NONE = T.let(:none, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) AUTO = T.let(:auto, OpenAI::Models::Responses::ToolChoiceOptions::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses/tool_choice_types.rbi b/rbi/lib/openai/models/responses/tool_choice_types.rbi index ece62dd0..1b8ed0b6 100644 --- a/rbi/lib/openai/models/responses/tool_choice_types.rbi +++ b/rbi/lib/openai/models/responses/tool_choice_types.rbi @@ -35,8 +35,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } FILE_SEARCH = T.let(:file_search, OpenAI::Models::Responses::ToolChoiceTypes::Type::TaggedSymbol) WEB_SEARCH_PREVIEW = diff --git a/rbi/lib/openai/models/responses/web_search_tool.rbi b/rbi/lib/openai/models/responses/web_search_tool.rbi index 75ccebd6..9b23c93c 100644 --- a/rbi/lib/openai/models/responses/web_search_tool.rbi +++ b/rbi/lib/openai/models/responses/web_search_tool.rbi @@ -63,8 +63,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } WEB_SEARCH_PREVIEW = T.let(:web_search_preview, OpenAI::Models::Responses::WebSearchTool::Type::TaggedSymbol) @@ -82,8 +81,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } LOW = T.let(:low, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) MEDIUM = T.let(:medium, OpenAI::Models::Responses::WebSearchTool::SearchContextSize::TaggedSymbol) diff --git a/rbi/lib/openai/models/responses_model.rbi b/rbi/lib/openai/models/responses_model.rbi index 1ae31d0c..24670d55 100644 --- a/rbi/lib/openai/models/responses_model.rbi +++ b/rbi/lib/openai/models/responses_model.rbi @@ -9,8 +9,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel) } - OrSymbol = - T.type_alias 
{ T.any(Symbol, String, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } O1_PRO = T.let(:"o1-pro", OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol) O1_PRO_2025_03_19 = @@ -30,7 +29,7 @@ module OpenAI sig do override .returns( - [String, OpenAI::Models::ChatModel::OrSymbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::OrSymbol] + [String, OpenAI::Models::ChatModel::TaggedSymbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel::TaggedSymbol] ) end def self.variants; end diff --git a/rbi/lib/openai/models/upload.rbi b/rbi/lib/openai/models/upload.rbi index 7a9a7404..083dbd10 100644 --- a/rbi/lib/openai/models/upload.rbi +++ b/rbi/lib/openai/models/upload.rbi @@ -95,7 +95,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::Upload::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::Upload::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } PENDING = T.let(:pending, OpenAI::Models::Upload::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::Upload::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store.rbi b/rbi/lib/openai/models/vector_store.rbi index 3426c9f4..dd704e90 100644 --- a/rbi/lib/openai/models/vector_store.rbi +++ b/rbi/lib/openai/models/vector_store.rbi @@ -163,7 +163,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStore::Status) } - OrSymbol = T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStore::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } EXPIRED = T.let(:expired, OpenAI::Models::VectorStore::Status::TaggedSymbol) IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStore::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store_list_params.rbi b/rbi/lib/openai/models/vector_store_list_params.rbi index e40b3b45..802bf5c5 100644 --- a/rbi/lib/openai/models/vector_store_list_params.rbi +++ b/rbi/lib/openai/models/vector_store_list_params.rbi @@ -74,8 +74,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStoreListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_store_search_params.rbi b/rbi/lib/openai/models/vector_store_search_params.rbi index e5456dff..47525fb1 100644 --- a/rbi/lib/openai/models/vector_store_search_params.rbi +++ b/rbi/lib/openai/models/vector_store_search_params.rbi @@ -139,8 +139,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } AUTO = T.let(:auto, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker::TaggedSymbol) DEFAULT_2024_11_15 = diff --git a/rbi/lib/openai/models/vector_store_search_response.rbi b/rbi/lib/openai/models/vector_store_search_response.rbi index 454d54a1..fdfceacf 100644 --- 
a/rbi/lib/openai/models/vector_store_search_response.rbi +++ b/rbi/lib/openai/models/vector_store_search_response.rbi @@ -86,8 +86,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } TEXT = T.let(:text, OpenAI::Models::VectorStoreSearchResponse::Content::Type::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi index 8911fb0e..041e3c17 100644 --- a/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_batch_list_files_params.rbi @@ -98,8 +98,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter::TaggedSymbol) @@ -120,8 +119,7 @@ module OpenAI TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/file_list_params.rbi b/rbi/lib/openai/models/vector_stores/file_list_params.rbi index 10960b9c..0ef2cb3a 100644 --- a/rbi/lib/openai/models/vector_stores/file_list_params.rbi +++ b/rbi/lib/openai/models/vector_stores/file_list_params.rbi @@ -83,8 +83,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Filter) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::FileListParams::Filter::TaggedSymbol) @@ -101,8 +100,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::FileListParams::Order) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } ASC = T.let(:asc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) DESC = T.let(:desc, OpenAI::Models::VectorStores::FileListParams::Order::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi index 6bf82fb0..72356d37 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file.rbi @@ -156,8 +156,7 @@ module OpenAI TaggedSymbol = T.type_alias { 
T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } SERVER_ERROR = T.let(:server_error, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code::TaggedSymbol) @@ -178,8 +177,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) COMPLETED = T.let(:completed, OpenAI::Models::VectorStores::VectorStoreFile::Status::TaggedSymbol) diff --git a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi index cef32c64..4b1e6a60 100644 --- a/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi +++ b/rbi/lib/openai/models/vector_stores/vector_store_file_batch.rbi @@ -129,8 +129,7 @@ module OpenAI extend OpenAI::Internal::Type::Enum TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status) } - OrSymbol = - T.type_alias { T.any(Symbol, String, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) } + OrSymbol = T.type_alias { T.any(Symbol, String) } IN_PROGRESS = T.let(:in_progress, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status::TaggedSymbol) From b5fba2e689884dc011dec9fd2d8349c9c842d274 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 11:12:30 +0000 Subject: [PATCH 09/15] chore: use `@!method` instead of `@!parse` for virtual method type definitions --- .yardopts | 1 + lib/openai/internal/type/base_model.rb | 2 + lib/openai/internal/type/enum.rb | 7 +- lib/openai/models/all_models.rb | 12 +- .../models/audio/speech_create_params.rb | 37 +- lib/openai/models/audio/speech_model.rb | 7 +- lib/openai/models/audio/transcription.rb | 28 +- .../audio/transcription_create_params.rb | 49 +- .../audio/transcription_create_response.rb | 5 +- .../models/audio/transcription_include.rb | 7 +- .../models/audio/transcription_segment.rb | 40 +- .../audio/transcription_stream_event.rb | 5 +- .../audio/transcription_text_delta_event.rb | 34 +- .../audio/transcription_text_done_event.rb | 34 +- .../models/audio/transcription_verbose.rb | 22 +- lib/openai/models/audio/transcription_word.rb | 12 +- lib/openai/models/audio/translation.rb | 8 +- .../models/audio/translation_create_params.rb | 30 +- .../audio/translation_create_response.rb | 5 +- .../models/audio/translation_verbose.rb | 14 +- lib/openai/models/audio_model.rb | 7 +- lib/openai/models/audio_response_format.rb | 7 +- .../auto_file_chunking_strategy_param.rb | 14 +- lib/openai/models/batch.rb | 87 ++-- lib/openai/models/batch_cancel_params.rb | 8 +- lib/openai/models/batch_create_params.rb | 30 +- lib/openai/models/batch_error.rb | 14 +- lib/openai/models/batch_list_params.rb | 12 +- lib/openai/models/batch_request_counts.rb | 16 +- lib/openai/models/batch_retrieve_params.rb | 8 +- lib/openai/models/beta/assistant.rb | 89 ++-- .../models/beta/assistant_create_params.rb | 140 ++---- .../models/beta/assistant_delete_params.rb | 8 +- 
lib/openai/models/beta/assistant_deleted.rb | 12 +- .../models/beta/assistant_list_params.rb | 23 +- .../beta/assistant_response_format_option.rb | 5 +- .../models/beta/assistant_retrieve_params.rb | 8 +- .../models/beta/assistant_stream_event.rb | 419 +++++++----------- lib/openai/models/beta/assistant_tool.rb | 5 +- .../models/beta/assistant_tool_choice.rb | 23 +- .../beta/assistant_tool_choice_function.rb | 8 +- .../beta/assistant_tool_choice_option.rb | 12 +- .../models/beta/assistant_update_params.rb | 87 ++-- .../models/beta/code_interpreter_tool.rb | 8 +- lib/openai/models/beta/file_search_tool.rb | 55 +-- lib/openai/models/beta/function_tool.rb | 10 +- .../models/beta/message_stream_event.rb | 95 ++-- .../models/beta/run_step_stream_event.rb | 131 +++--- lib/openai/models/beta/run_stream_event.rb | 165 +++---- lib/openai/models/beta/thread.rb | 58 +-- .../beta/thread_create_and_run_params.rb | 283 ++++-------- .../models/beta/thread_create_params.rb | 152 +++---- .../models/beta/thread_delete_params.rb | 8 +- lib/openai/models/beta/thread_deleted.rb | 12 +- .../models/beta/thread_retrieve_params.rb | 8 +- lib/openai/models/beta/thread_stream_event.rb | 20 +- .../models/beta/thread_update_params.rb | 48 +- lib/openai/models/beta/threads/annotation.rb | 5 +- .../models/beta/threads/annotation_delta.rb | 5 +- .../beta/threads/file_citation_annotation.rb | 32 +- .../threads/file_citation_delta_annotation.rb | 36 +- .../beta/threads/file_path_annotation.rb | 30 +- .../threads/file_path_delta_annotation.rb | 32 +- lib/openai/models/beta/threads/image_file.rb | 17 +- .../beta/threads/image_file_content_block.rb | 16 +- .../models/beta/threads/image_file_delta.rb | 17 +- .../beta/threads/image_file_delta_block.rb | 18 +- lib/openai/models/beta/threads/image_url.rb | 17 +- .../beta/threads/image_url_content_block.rb | 14 +- .../models/beta/threads/image_url_delta.rb | 17 +- .../beta/threads/image_url_delta_block.rb | 16 +- lib/openai/models/beta/threads/message.rb | 114 ++--- .../models/beta/threads/message_content.rb | 5 +- .../beta/threads/message_content_delta.rb | 5 +- .../threads/message_content_part_param.rb | 5 +- .../beta/threads/message_create_params.rb | 51 +-- .../beta/threads/message_delete_params.rb | 10 +- .../models/beta/threads/message_deleted.rb | 12 +- .../models/beta/threads/message_delta.rb | 21 +- .../beta/threads/message_delta_event.rb | 18 +- .../beta/threads/message_list_params.rb | 25 +- .../beta/threads/message_retrieve_params.rb | 10 +- .../beta/threads/message_update_params.rb | 12 +- .../beta/threads/refusal_content_block.rb | 14 +- .../beta/threads/refusal_delta_block.rb | 16 +- .../required_action_function_tool_call.rb | 30 +- lib/openai/models/beta/threads/run.rb | 208 +++------ .../models/beta/threads/run_cancel_params.rb | 10 +- .../models/beta/threads/run_create_params.rb | 141 ++---- .../models/beta/threads/run_list_params.rb | 23 +- .../beta/threads/run_retrieve_params.rb | 10 +- lib/openai/models/beta/threads/run_status.rb | 7 +- .../threads/run_submit_tool_outputs_params.rb | 22 +- .../models/beta/threads/run_update_params.rb | 12 +- .../threads/runs/code_interpreter_logs.rb | 16 +- .../runs/code_interpreter_output_image.rb | 20 +- .../runs/code_interpreter_tool_call.rb | 67 +-- .../runs/code_interpreter_tool_call_delta.rb | 37 +- .../threads/runs/file_search_tool_call.rb | 82 ++-- .../runs/file_search_tool_call_delta.rb | 14 +- .../beta/threads/runs/function_tool_call.rb | 28 +- .../threads/runs/function_tool_call_delta.rb | 30 +- 
.../runs/message_creation_step_details.rb | 22 +- .../models/beta/threads/runs/run_step.rb | 122 ++--- .../beta/threads/runs/run_step_delta.rb | 17 +- .../beta/threads/runs/run_step_delta_event.rb | 18 +- .../runs/run_step_delta_message_delta.rb | 22 +- .../beta/threads/runs/run_step_include.rb | 7 +- .../beta/threads/runs/step_list_params.rb | 27 +- .../beta/threads/runs/step_retrieve_params.rb | 14 +- .../models/beta/threads/runs/tool_call.rb | 5 +- .../beta/threads/runs/tool_call_delta.rb | 5 +- .../threads/runs/tool_call_delta_object.rb | 14 +- .../threads/runs/tool_calls_step_details.rb | 14 +- lib/openai/models/beta/threads/text.rb | 10 +- .../models/beta/threads/text_content_block.rb | 14 +- .../beta/threads/text_content_block_param.rb | 14 +- lib/openai/models/beta/threads/text_delta.rb | 10 +- .../models/beta/threads/text_delta_block.rb | 16 +- lib/openai/models/chat/chat_completion.rb | 82 ++-- ...chat_completion_assistant_message_param.rb | 75 ++-- .../models/chat/chat_completion_audio.rb | 22 +- .../chat/chat_completion_audio_param.rb | 30 +- .../models/chat/chat_completion_chunk.rb | 158 +++---- .../chat/chat_completion_content_part.rb | 33 +- .../chat_completion_content_part_image.rb | 31 +- ...hat_completion_content_part_input_audio.rb | 31 +- .../chat_completion_content_part_refusal.rb | 10 +- .../chat/chat_completion_content_part_text.rb | 16 +- .../models/chat/chat_completion_deleted.rb | 12 +- ...chat_completion_developer_message_param.rb | 25 +- .../chat_completion_function_call_option.rb | 14 +- .../chat_completion_function_message_param.rb | 12 +- .../models/chat/chat_completion_message.rb | 83 ++-- .../chat/chat_completion_message_param.rb | 5 +- .../chat/chat_completion_message_tool_call.rb | 26 +- .../models/chat/chat_completion_modality.rb | 7 +- .../chat/chat_completion_named_tool_choice.rb | 24 +- .../chat_completion_prediction_content.rb | 21 +- .../models/chat/chat_completion_role.rb | 7 +- .../chat/chat_completion_store_message.rb | 12 +- .../chat/chat_completion_stream_options.rb | 12 +- .../chat_completion_system_message_param.rb | 25 +- .../chat/chat_completion_token_logprob.rb | 26 +- .../models/chat/chat_completion_tool.rb | 10 +- .../chat_completion_tool_choice_option.rb | 12 +- .../chat_completion_tool_message_param.rb | 17 +- .../chat_completion_user_message_param.rb | 23 +- .../models/chat/completion_create_params.rb | 213 +++------ .../models/chat/completion_delete_params.rb | 8 +- .../models/chat/completion_list_params.rb | 25 +- .../models/chat/completion_retrieve_params.rb | 8 +- .../models/chat/completion_update_params.rb | 10 +- .../chat/completions/message_list_params.rb | 21 +- lib/openai/models/chat_model.rb | 7 +- lib/openai/models/comparison_filter.rb | 30 +- lib/openai/models/completion.rb | 26 +- lib/openai/models/completion_choice.rb | 35 +- lib/openai/models/completion_create_params.rb | 79 ++-- lib/openai/models/completion_usage.rb | 69 +-- lib/openai/models/compound_filter.rb | 26 +- .../models/create_embedding_response.rb | 28 +- lib/openai/models/embedding.rb | 16 +- lib/openai/models/embedding_create_params.rb | 35 +- lib/openai/models/embedding_model.rb | 7 +- lib/openai/models/error_object.rb | 14 +- lib/openai/models/eval_create_params.rb | 207 +++------ lib/openai/models/eval_create_response.rb | 58 +-- .../models/eval_custom_data_source_config.rb | 24 +- lib/openai/models/eval_delete_params.rb | 8 +- lib/openai/models/eval_delete_response.rb | 12 +- lib/openai/models/eval_label_model_grader.rb | 108 ++--- 
lib/openai/models/eval_list_params.rb | 30 +- lib/openai/models/eval_list_response.rb | 58 +-- lib/openai/models/eval_retrieve_params.rb | 8 +- lib/openai/models/eval_retrieve_response.rb | 58 +-- ...l_stored_completions_data_source_config.rb | 24 +- lib/openai/models/eval_string_check_grader.rb | 29 +- .../models/eval_text_similarity_grader.rb | 29 +- lib/openai/models/eval_update_params.rb | 12 +- lib/openai/models/eval_update_response.rb | 58 +-- ...create_eval_completions_run_data_source.rb | 224 ++++------ .../create_eval_jsonl_run_data_source.rb | 51 +-- lib/openai/models/evals/eval_api_error.rb | 14 +- lib/openai/models/evals/run_cancel_params.rb | 10 +- .../models/evals/run_cancel_response.rb | 109 ++--- lib/openai/models/evals/run_create_params.rb | 19 +- .../models/evals/run_create_response.rb | 109 ++--- lib/openai/models/evals/run_delete_params.rb | 10 +- .../models/evals/run_delete_response.rb | 12 +- lib/openai/models/evals/run_list_params.rb | 30 +- lib/openai/models/evals/run_list_response.rb | 109 ++--- .../models/evals/run_retrieve_params.rb | 10 +- .../models/evals/run_retrieve_response.rb | 109 ++--- .../evals/runs/output_item_list_params.rb | 32 +- .../evals/runs/output_item_list_response.rb | 130 ++---- .../evals/runs/output_item_retrieve_params.rb | 12 +- .../runs/output_item_retrieve_response.rb | 130 ++---- lib/openai/models/file_chunking_strategy.rb | 5 +- .../models/file_chunking_strategy_param.rb | 5 +- lib/openai/models/file_content_params.rb | 8 +- lib/openai/models/file_create_params.rb | 12 +- lib/openai/models/file_delete_params.rb | 8 +- lib/openai/models/file_deleted.rb | 12 +- lib/openai/models/file_list_params.rb | 23 +- lib/openai/models/file_object.rb | 55 +-- lib/openai/models/file_purpose.rb | 7 +- lib/openai/models/file_retrieve_params.rb | 8 +- .../checkpoints/permission_create_params.rb | 10 +- .../checkpoints/permission_create_response.rb | 20 +- .../checkpoints/permission_delete_params.rb | 8 +- .../checkpoints/permission_delete_response.rb | 12 +- .../checkpoints/permission_retrieve_params.rb | 23 +- .../permission_retrieve_response.rb | 36 +- .../models/fine_tuning/fine_tuning_job.rb | 247 ++++------- .../fine_tuning/fine_tuning_job_event.rb | 38 +- .../fine_tuning_job_wandb_integration.rb | 24 +- ...ine_tuning_job_wandb_integration_object.rb | 10 +- .../models/fine_tuning/job_cancel_params.rb | 8 +- .../models/fine_tuning/job_create_params.rb | 228 ++++------ .../fine_tuning/job_list_events_params.rb | 12 +- .../models/fine_tuning/job_list_params.rb | 14 +- .../models/fine_tuning/job_retrieve_params.rb | 8 +- .../jobs/checkpoint_list_params.rb | 12 +- .../jobs/fine_tuning_job_checkpoint.rb | 72 +-- lib/openai/models/function_definition.rb | 14 +- lib/openai/models/image.rb | 16 +- .../models/image_create_variation_params.rb | 39 +- lib/openai/models/image_edit_params.rb | 56 +-- lib/openai/models/image_generate_params.rb | 70 +-- lib/openai/models/image_model.rb | 7 +- lib/openai/models/images_response.rb | 10 +- lib/openai/models/model.rb | 18 +- lib/openai/models/model_delete_params.rb | 8 +- lib/openai/models/model_deleted.rb | 12 +- lib/openai/models/model_list_params.rb | 8 +- lib/openai/models/model_retrieve_params.rb | 8 +- lib/openai/models/moderation.rb | 264 ++++------- lib/openai/models/moderation_create_params.rb | 22 +- .../models/moderation_create_response.rb | 16 +- .../models/moderation_image_url_input.rb | 26 +- lib/openai/models/moderation_model.rb | 7 +- .../models/moderation_multi_modal_input.rb | 5 +- 
lib/openai/models/moderation_text_input.rb | 14 +- .../other_file_chunking_strategy_object.rb | 16 +- lib/openai/models/reasoning.rb | 36 +- lib/openai/models/reasoning_effort.rb | 7 +- .../models/response_format_json_object.rb | 16 +- .../models/response_format_json_schema.rb | 36 +- lib/openai/models/response_format_text.rb | 12 +- lib/openai/models/responses/computer_tool.rb | 27 +- .../models/responses/easy_input_message.rb | 43 +- .../models/responses/file_search_tool.rb | 50 +-- lib/openai/models/responses/function_tool.rb | 24 +- .../responses/input_item_list_params.rb | 25 +- lib/openai/models/responses/response.rb | 117 ++--- .../responses/response_audio_delta_event.rb | 14 +- .../responses/response_audio_done_event.rb | 12 +- .../response_audio_transcript_delta_event.rb | 14 +- .../response_audio_transcript_done_event.rb | 12 +- ..._code_interpreter_call_code_delta_event.rb | 16 +- ...e_code_interpreter_call_code_done_event.rb | 16 +- ...e_code_interpreter_call_completed_event.rb | 16 +- ...code_interpreter_call_in_progress_event.rb | 16 +- ...ode_interpreter_call_interpreting_event.rb | 16 +- .../response_code_interpreter_tool_call.rb | 70 ++- .../responses/response_completed_event.rb | 14 +- .../responses/response_computer_tool_call.rb | 218 ++++----- ...response_computer_tool_call_output_item.rb | 41 +- ...se_computer_tool_call_output_screenshot.rb | 16 +- .../models/responses/response_content.rb | 5 +- .../response_content_part_added_event.rb | 25 +- .../response_content_part_done_event.rb | 25 +- .../responses/response_create_params.rb | 91 ++-- .../responses/response_created_event.rb | 14 +- .../responses/response_delete_params.rb | 8 +- lib/openai/models/responses/response_error.rb | 21 +- .../models/responses/response_error_event.rb | 18 +- .../models/responses/response_failed_event.rb | 14 +- ...sponse_file_search_call_completed_event.rb | 16 +- ...onse_file_search_call_in_progress_event.rb | 16 +- ...sponse_file_search_call_searching_event.rb | 16 +- .../response_file_search_tool_call.rb | 52 +-- .../responses/response_format_text_config.rb | 5 +- ...response_format_text_json_schema_config.rb | 24 +- ...nse_function_call_arguments_delta_event.rb | 18 +- ...onse_function_call_arguments_done_event.rb | 18 +- .../responses/response_function_tool_call.rb | 33 +- .../response_function_tool_call_item.rb | 16 +- ...response_function_tool_call_output_item.rb | 23 +- .../responses/response_function_web_search.rb | 27 +- .../responses/response_in_progress_event.rb | 14 +- .../models/responses/response_includable.rb | 7 +- .../responses/response_incomplete_event.rb | 14 +- .../models/responses/response_input_audio.rb | 23 +- .../responses/response_input_content.rb | 5 +- .../models/responses/response_input_file.rb | 18 +- .../models/responses/response_input_image.rb | 27 +- .../models/responses/response_input_item.rb | 134 ++---- .../responses/response_input_message_item.rb | 37 +- .../models/responses/response_input_text.rb | 14 +- lib/openai/models/responses/response_item.rb | 5 +- .../models/responses/response_item_list.rb | 20 +- .../models/responses/response_output_audio.rb | 16 +- .../models/responses/response_output_item.rb | 5 +- .../response_output_item_added_event.rb | 16 +- .../response_output_item_done_event.rb | 16 +- .../responses/response_output_message.rb | 32 +- .../responses/response_output_refusal.rb | 14 +- .../models/responses/response_output_text.rb | 73 ++- .../responses/response_reasoning_item.rb | 37 +- .../responses/response_refusal_delta_event.rb | 20 +- 
.../responses/response_refusal_done_event.rb | 20 +- .../responses/response_retrieve_params.rb | 10 +- .../models/responses/response_status.rb | 7 +- .../models/responses/response_stream_event.rb | 5 +- .../response_text_annotation_delta_event.rb | 89 ++-- .../models/responses/response_text_config.rb | 20 +- .../responses/response_text_delta_event.rb | 20 +- .../responses/response_text_done_event.rb | 20 +- lib/openai/models/responses/response_usage.rb | 46 +- ...esponse_web_search_call_completed_event.rb | 16 +- ...ponse_web_search_call_in_progress_event.rb | 16 +- ...esponse_web_search_call_searching_event.rb | 16 +- lib/openai/models/responses/tool.rb | 5 +- .../models/responses/tool_choice_function.rb | 14 +- .../models/responses/tool_choice_options.rb | 7 +- .../models/responses/tool_choice_types.rb | 21 +- .../models/responses/web_search_tool.rb | 50 +-- lib/openai/models/responses_model.rb | 12 +- .../models/static_file_chunking_strategy.rb | 10 +- .../static_file_chunking_strategy_object.rb | 10 +- ...tic_file_chunking_strategy_object_param.rb | 14 +- lib/openai/models/upload.rb | 35 +- lib/openai/models/upload_cancel_params.rb | 8 +- lib/openai/models/upload_complete_params.rb | 12 +- lib/openai/models/upload_create_params.rb | 16 +- .../models/uploads/part_create_params.rb | 10 +- lib/openai/models/uploads/upload_part.rb | 18 +- lib/openai/models/vector_store.rb | 86 ++-- .../models/vector_store_create_params.rb | 42 +- .../models/vector_store_delete_params.rb | 8 +- lib/openai/models/vector_store_deleted.rb | 12 +- lib/openai/models/vector_store_list_params.rb | 23 +- .../models/vector_store_retrieve_params.rb | 8 +- .../models/vector_store_search_params.rb | 59 +-- .../models/vector_store_search_response.rb | 38 +- .../models/vector_store_update_params.rb | 28 +- .../vector_stores/file_batch_cancel_params.rb | 10 +- .../vector_stores/file_batch_create_params.rb | 19 +- .../file_batch_list_files_params.rb | 34 +- .../file_batch_retrieve_params.rb | 10 +- .../vector_stores/file_content_params.rb | 10 +- .../vector_stores/file_content_response.rb | 10 +- .../vector_stores/file_create_params.rb | 19 +- .../vector_stores/file_delete_params.rb | 10 +- .../models/vector_stores/file_list_params.rb | 32 +- .../vector_stores/file_retrieve_params.rb | 10 +- .../vector_stores/file_update_params.rb | 17 +- .../models/vector_stores/vector_store_file.rb | 76 +--- .../vector_stores/vector_store_file_batch.rb | 45 +- .../vector_store_file_deleted.rb | 12 +- lib/openai/request_options.rb | 7 +- rbi/lib/openai/internal/type/base_model.rbi | 1 + rbi/lib/openai/internal/type/enum.rbi | 6 - sig/openai/internal/type/enum.rbs | 2 - test/openai/internal/type/base_model_test.rb | 2 + 367 files changed, 4185 insertions(+), 7968 deletions(-) diff --git a/.yardopts b/.yardopts index 004c697b..5757768a 100644 --- a/.yardopts +++ b/.yardopts @@ -1,3 +1,4 @@ +--type-name-tag generic:Generic --markup markdown --markup-provider redcarpet --exclude /rbi diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index 239dbfd3..e0849cca 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -339,6 +339,8 @@ def deconstruct_keys(keys) end class << self + # @api private + # # @param model [OpenAI::Internal::Type::BaseModel] # # @return [Hash{Symbol=>Object}] diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index e850de7b..8f14e58b 100644 --- a/lib/openai/internal/type/enum.rb +++ 
b/lib/openai/internal/type/enum.rb @@ -46,12 +46,7 @@ module Enum # All of the valid Symbol values for this enum. # # @return [Array] - def values = (@values ||= constants.map { const_get(_1) }) - - # @api private - # - # Guard against thread safety issues by instantiating `@values`. - private def finalize! = values + def values = constants.map { const_get(_1) } # @param other [Object] # diff --git a/lib/openai/models/all_models.rb b/lib/openai/models/all_models.rb index 0abbe3bc..d79c7182 100644 --- a/lib/openai/models/all_models.rb +++ b/lib/openai/models/all_models.rb @@ -19,16 +19,12 @@ module ResponsesOnlyModel COMPUTER_USE_PREVIEW = :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::AllModels::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index ba364d38..7c59f76f 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -64,18 +64,14 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :speed - # @!parse - # # @param input [String] - # # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] - # # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] - # # @param instructions [String] - # # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] - # # @param speed [Float] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) + # @param input [String] + # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] + # @param voice [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] + # @param instructions [String] + # @param response_format [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] + # @param speed [Float] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # One of the available [TTS models](https://platform.openai.com/docs/models#tts): # `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. @@ -87,9 +83,8 @@ module Model # One of the available [TTS models](https://platform.openai.com/docs/models#tts): `tts-1`, `tts-1-hd` or `gpt-4o-mini-tts`. variant enum: -> { OpenAI::Models::Audio::SpeechModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::Audio::SpeechModel)] end # The voice to use when generating the audio. 
Supported voices are `alloy`, `ash`, @@ -123,9 +118,8 @@ module Voice variant const: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice::VERSE } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -156,11 +150,8 @@ module ResponseFormat WAV = :wav PCM = :pcm - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/speech_model.rb b/lib/openai/models/audio/speech_model.rb index 504b7319..39245ea8 100644 --- a/lib/openai/models/audio/speech_model.rb +++ b/lib/openai/models/audio/speech_model.rb @@ -10,11 +10,8 @@ module SpeechModel TTS_1_HD = :"tts-1-hd" GPT_4O_MINI_TTS = :"gpt-4o-mini-tts" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index ead34e3d..0e0cb142 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -22,16 +22,12 @@ class Transcription < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :logprobs - # @!parse - # # Represents a transcription response returned by model, based on the provided - # # input. - # # - # # @param text [String] - # # @param logprobs [Array] - # # - # def initialize(text:, logprobs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, logprobs: nil) + # Represents a transcription response returned by model, based on the provided + # input. + # + # @param text [String] + # @param logprobs [Array] class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute [r] token @@ -64,14 +60,10 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :logprob - # @!parse - # # @param token [String] - # # @param bytes [Array] - # # @param logprob [Float] - # # - # def initialize(token: nil, bytes: nil, logprob: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token: nil, bytes: nil, logprob: nil) + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] end end end diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index c883db5d..259a21cb 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -107,33 +107,16 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :timestamp_granularities - # @!parse - # # @param file [Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::AudioModel] - # # @param include [Array] - # # @param language [String] - # # @param prompt [String] - # # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] - # # @param temperature [Float] - # # @param timestamp_granularities [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # file:, - # model:, - # include: nil, - # language: nil, - # prompt: nil, - # response_format: nil, - # temperature: nil, - # timestamp_granularities: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method 
initialize(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) + # @param file [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::AudioModel] + # @param include [Array] + # @param language [String] + # @param prompt [String] + # @param response_format [Symbol, OpenAI::Models::AudioResponseFormat] + # @param temperature [Float] + # @param timestamp_granularities [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. The options are `gpt-4o-transcribe`, # `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source @@ -146,9 +129,8 @@ module Model # ID of the model to use. The options are `gpt-4o-transcribe`, `gpt-4o-mini-transcribe`, and `whisper-1` (which is powered by our open source Whisper V2 model). variant enum: -> { OpenAI::Models::AudioModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end module TimestampGranularity @@ -157,11 +139,8 @@ module TimestampGranularity WORD = :word SEGMENT = :segment - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/transcription_create_response.rb b/lib/openai/models/audio/transcription_create_response.rb index 69d68b0b..52850e6a 100644 --- a/lib/openai/models/audio/transcription_create_response.rb +++ b/lib/openai/models/audio/transcription_create_response.rb @@ -18,9 +18,8 @@ module TranscriptionCreateResponse # Represents a verbose json transcription response returned by model, based on the provided input. variant -> { OpenAI::Models::Audio::TranscriptionVerbose } - # @!parse - # # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Audio::Transcription, OpenAI::Models::Audio::TranscriptionVerbose)] end end end diff --git a/lib/openai/models/audio/transcription_include.rb b/lib/openai/models/audio/transcription_include.rb index 5cde821b..2351452b 100644 --- a/lib/openai/models/audio/transcription_include.rb +++ b/lib/openai/models/audio/transcription_include.rb @@ -8,11 +8,8 @@ module TranscriptionInclude LOGPROBS = :logprobs - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/transcription_segment.rb b/lib/openai/models/audio/transcription_segment.rb index 852a77d1..19923d2d 100644 --- a/lib/openai/models/audio/transcription_segment.rb +++ b/lib/openai/models/audio/transcription_segment.rb @@ -67,35 +67,17 @@ class TranscriptionSegment < OpenAI::Internal::Type::BaseModel # @return [Array] required :tokens, OpenAI::Internal::Type::ArrayOf[Integer] - # @!parse - # # @param id [Integer] - # # @param avg_logprob [Float] - # # @param compression_ratio [Float] - # # @param end_ [Float] - # # @param no_speech_prob [Float] - # # @param seek [Integer] - # # @param start [Float] - # # @param temperature [Float] - # # @param text [String] - # # @param tokens [Array] - # # - # def initialize( - # id:, - # avg_logprob:, - # compression_ratio:, - # end_:, - # no_speech_prob:, - # seek:, - # start:, - # temperature:, - # text:, - # tokens:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, avg_logprob:, compression_ratio:, end_:, no_speech_prob:, seek:, start:, temperature:, text:, tokens:) + # @param id [Integer] + # @param avg_logprob [Float] + # @param compression_ratio [Float] + # @param end_ [Float] + # @param no_speech_prob [Float] + # @param seek [Integer] + # @param start [Float] + # @param temperature [Float] + # @param text [String] + # @param tokens [Array] end end end diff --git a/lib/openai/models/audio/transcription_stream_event.rb b/lib/openai/models/audio/transcription_stream_event.rb index de3b63d5..d4a5f12e 100644 --- a/lib/openai/models/audio/transcription_stream_event.rb +++ b/lib/openai/models/audio/transcription_stream_event.rb @@ -18,9 +18,8 @@ module TranscriptionStreamEvent # Emitted when the transcription is complete. Contains the complete transcription text. Only emitted when you [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) with the `Stream` parameter set to `true`. variant :"transcript.text.done", -> { OpenAI::Models::Audio::TranscriptionTextDoneEvent } - # @!parse - # # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Audio::TranscriptionTextDeltaEvent, OpenAI::Models::Audio::TranscriptionTextDoneEvent)] end end end diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index 731bf107..f7dff312 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -29,19 +29,15 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :logprobs - # @!parse - # # Emitted when there is an additional text delta. This is also the first event - # # emitted when the transcription starts. Only emitted when you - # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # # with the `Stream` parameter set to `true`. 
- # # - # # @param delta [String] - # # @param logprobs [Array] - # # @param type [Symbol, :"transcript.text.delta"] - # # - # def initialize(delta:, logprobs: nil, type: :"transcript.text.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") + # Emitted when there is an additional text delta. This is also the first event + # emitted when the transcription starts. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. + # + # @param delta [String] + # @param logprobs [Array] + # @param type [Symbol, :"transcript.text.delta"] class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute [r] token @@ -74,14 +70,10 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :logprob - # @!parse - # # @param token [String] - # # @param bytes [Array] - # # @param logprob [Float] - # # - # def initialize(token: nil, bytes: nil, logprob: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token: nil, bytes: nil, logprob: nil) + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] end end end diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index be1ee0fe..3e0fb33c 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -30,19 +30,15 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :logprobs - # @!parse - # # Emitted when the transcription is complete. Contains the complete transcription - # # text. Only emitted when you - # # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) - # # with the `Stream` parameter set to `true`. - # # - # # @param text [String] - # # @param logprobs [Array] - # # @param type [Symbol, :"transcript.text.done"] - # # - # def initialize(text:, logprobs: nil, type: :"transcript.text.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") + # Emitted when the transcription is complete. Contains the complete transcription + # text. Only emitted when you + # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) + # with the `Stream` parameter set to `true`. 
+ # + # @param text [String] + # @param logprobs [Array] + # @param type [Symbol, :"transcript.text.done"] class Logprob < OpenAI::Internal::Type::BaseModel # @!attribute [r] token @@ -75,14 +71,10 @@ class Logprob < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :logprob - # @!parse - # # @param token [String] - # # @param bytes [Array] - # # @param logprob [Float] - # # - # def initialize(token: nil, bytes: nil, logprob: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token: nil, bytes: nil, logprob: nil) + # @param token [String] + # @param bytes [Array] + # @param logprob [Float] end end end diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index 80068f85..f0b3f7c3 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -42,19 +42,15 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :words - # @!parse - # # Represents a verbose json transcription response returned by model, based on the - # # provided input. - # # - # # @param duration [Float] - # # @param language [String] - # # @param text [String] - # # @param segments [Array] - # # @param words [Array] - # # - # def initialize(duration:, language:, text:, segments: nil, words: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(duration:, language:, text:, segments: nil, words: nil) + # Represents a verbose json transcription response returned by model, based on the + # provided input. + # + # @param duration [Float] + # @param language [String] + # @param text [String] + # @param segments [Array] + # @param words [Array] end end end diff --git a/lib/openai/models/audio/transcription_word.rb b/lib/openai/models/audio/transcription_word.rb index 5df30687..f7f973cd 100644 --- a/lib/openai/models/audio/transcription_word.rb +++ b/lib/openai/models/audio/transcription_word.rb @@ -22,14 +22,10 @@ class TranscriptionWord < OpenAI::Internal::Type::BaseModel # @return [String] required :word, String - # @!parse - # # @param end_ [Float] - # # @param start [Float] - # # @param word [String] - # # - # def initialize(end_:, start:, word:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_:, start:, word:) + # @param end_ [Float] + # @param start [Float] + # @param word [String] end end end diff --git a/lib/openai/models/audio/translation.rb b/lib/openai/models/audio/translation.rb index c9631757..635498f8 100644 --- a/lib/openai/models/audio/translation.rb +++ b/lib/openai/models/audio/translation.rb @@ -9,12 +9,8 @@ class Translation < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!parse - # # @param text [String] - # # - # def initialize(text:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:) + # @param text [String] end end end diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 51cc325c..7589e685 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -61,17 +61,13 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :temperature - # @!parse - # # @param file 
[Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::AudioModel] - # # @param prompt [String] - # # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] - # # @param temperature [Float] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) + # @param file [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::AudioModel] + # @param prompt [String] + # @param response_format [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] + # @param temperature [Float] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. Only `whisper-1` (which is powered by our open source # Whisper V2 model) is currently available. @@ -83,9 +79,8 @@ module Model # ID of the model to use. Only `whisper-1` (which is powered by our open source Whisper V2 model) is currently available. variant enum: -> { OpenAI::Models::AudioModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::AudioModel)] end # The format of the output, in one of these options: `json`, `text`, `srt`, @@ -99,11 +94,8 @@ module ResponseFormat VERBOSE_JSON = :verbose_json VTT = :vtt - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio/translation_create_response.rb b/lib/openai/models/audio/translation_create_response.rb index f50f6c86..49d7cc9e 100644 --- a/lib/openai/models/audio/translation_create_response.rb +++ b/lib/openai/models/audio/translation_create_response.rb @@ -11,9 +11,8 @@ module TranslationCreateResponse variant -> { OpenAI::Models::Audio::TranslationVerbose } - # @!parse - # # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Audio::Translation, OpenAI::Models::Audio::TranslationVerbose)] end end end diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index b8a970d0..5d802ffc 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -32,15 +32,11 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :segments - # @!parse - # # @param duration [Float] - # # @param language [String] - # # @param text [String] - # # @param segments [Array] - # # - # def initialize(duration:, language:, text:, segments: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(duration:, language:, text:, segments: nil) + # @param duration [Float] + # @param language [String] + # @param text [String] + # @param segments [Array] end end end diff --git a/lib/openai/models/audio_model.rb b/lib/openai/models/audio_model.rb index 4373ee6b..8e0e194e 100644 --- a/lib/openai/models/audio_model.rb +++ b/lib/openai/models/audio_model.rb @@ -9,11 +9,8 @@ module AudioModel GPT_4O_TRANSCRIBE = 
:"gpt-4o-transcribe" GPT_4O_MINI_TRANSCRIBE = :"gpt-4o-mini-transcribe" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/audio_response_format.rb b/lib/openai/models/audio_response_format.rb index 2babfc83..5644ca89 100644 --- a/lib/openai/models/audio_response_format.rb +++ b/lib/openai/models/audio_response_format.rb @@ -14,11 +14,8 @@ module AudioResponseFormat VERBOSE_JSON = :verbose_json VTT = :vtt - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/auto_file_chunking_strategy_param.rb b/lib/openai/models/auto_file_chunking_strategy_param.rb index a33c9dcc..9065ad1c 100644 --- a/lib/openai/models/auto_file_chunking_strategy_param.rb +++ b/lib/openai/models/auto_file_chunking_strategy_param.rb @@ -9,15 +9,11 @@ class AutoFileChunkingStrategyParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. + # + # @param type [Symbol, :auto] end end end diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index a490b124..0eb7ef45 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -175,55 +175,27 @@ class Batch < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::BatchRequestCounts] # attr_writer :request_counts - # @!parse - # # @param id [String] - # # @param completion_window [String] - # # @param created_at [Integer] - # # @param endpoint [String] - # # @param input_file_id [String] - # # @param status [Symbol, OpenAI::Models::Batch::Status] - # # @param cancelled_at [Integer] - # # @param cancelling_at [Integer] - # # @param completed_at [Integer] - # # @param error_file_id [String] - # # @param errors [OpenAI::Models::Batch::Errors] - # # @param expired_at [Integer] - # # @param expires_at [Integer] - # # @param failed_at [Integer] - # # @param finalizing_at [Integer] - # # @param in_progress_at [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param output_file_id [String] - # # @param request_counts [OpenAI::Models::BatchRequestCounts] - # # @param object [Symbol, :batch] - # # - # def initialize( - # id:, - # completion_window:, - # created_at:, - # endpoint:, - # input_file_id:, - # status:, - # cancelled_at: nil, - # cancelling_at: nil, - # completed_at: nil, - # error_file_id: nil, - # errors: nil, - # expired_at: nil, - # expires_at: nil, - # failed_at: nil, - # finalizing_at: nil, - # in_progress_at: nil, - # metadata: nil, - # output_file_id: nil, - # request_counts: nil, - # object: :batch, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, 
output_file_id: nil, request_counts: nil, object: :batch) + # @param id [String] + # @param completion_window [String] + # @param created_at [Integer] + # @param endpoint [String] + # @param input_file_id [String] + # @param status [Symbol, OpenAI::Models::Batch::Status] + # @param cancelled_at [Integer] + # @param cancelling_at [Integer] + # @param completed_at [Integer] + # @param error_file_id [String] + # @param errors [OpenAI::Models::Batch::Errors] + # @param expired_at [Integer] + # @param expires_at [Integer] + # @param failed_at [Integer] + # @param finalizing_at [Integer] + # @param in_progress_at [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param output_file_id [String] + # @param request_counts [OpenAI::Models::BatchRequestCounts] + # @param object [Symbol, :batch] # The current status of the batch. # @@ -240,11 +212,8 @@ module Status CANCELLING = :cancelling CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Batch#errors @@ -268,13 +237,9 @@ class Errors < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :object - # @!parse - # # @param data [Array] - # # @param object [String] - # # - # def initialize(data: nil, object: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data: nil, object: nil) + # @param data [Array] + # @param object [String] end end end diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index 4e1871e0..60cd89b8 100644 --- a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -8,12 +8,8 @@ class BatchCancelParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index 95548d81..ef913352 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -49,16 +49,12 @@ class BatchCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] - # # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] - # # @param input_file_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_window:, endpoint:, input_file_id:, metadata: nil, request_options: {}) + # @param completion_window [Symbol, OpenAI::Models::BatchCreateParams::CompletionWindow] + # @param endpoint [Symbol, OpenAI::Models::BatchCreateParams::Endpoint] + # @param input_file_id [String] + # @param metadata 
[Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The time frame within which the batch should be processed. Currently only `24h` # is supported. @@ -67,11 +63,8 @@ module CompletionWindow COMPLETION_WINDOW_24H = :"24h" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The endpoint to be used for all requests in the batch. Currently @@ -86,11 +79,8 @@ module Endpoint V1_EMBEDDINGS = :"/v1/embeddings" V1_COMPLETIONS = :"/v1/completions" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/batch_error.rb b/lib/openai/models/batch_error.rb index 3d31ed4a..513fcaf3 100644 --- a/lib/openai/models/batch_error.rb +++ b/lib/openai/models/batch_error.rb @@ -35,15 +35,11 @@ class BatchError < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :param, String, nil?: true - # @!parse - # # @param code [String] - # # @param line [Integer, nil] - # # @param message [String] - # # @param param [String, nil] - # # - # def initialize(code: nil, line: nil, message: nil, param: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code: nil, line: nil, message: nil, param: nil) + # @param code [String] + # @param line [Integer, nil] + # @param message [String] + # @param param [String, nil] end end end diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index 675cc802..ab35f805 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -32,14 +32,10 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :limit - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/batch_request_counts.rb b/lib/openai/models/batch_request_counts.rb index 92ab5316..dce46757 100644 --- a/lib/openai/models/batch_request_counts.rb +++ b/lib/openai/models/batch_request_counts.rb @@ -21,16 +21,12 @@ class BatchRequestCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # The request counts for different statuses within the batch. - # # - # # @param completed [Integer] - # # @param failed [Integer] - # # @param total [Integer] - # # - # def initialize(completed:, failed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completed:, failed:, total:) + # The request counts for different statuses within the batch. 
+ # + # @param completed [Integer] + # @param failed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index fac34345..6c9e459c 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -8,12 +8,8 @@ class BatchRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index bd04e326..6f861ccc 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -123,43 +123,22 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # Represents an `assistant` that can call the model and use tools. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param description [String, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String, nil] - # # @param tools [Array] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] - # # @param top_p [Float, nil] - # # @param object [Symbol, :assistant] - # # - # def initialize( - # id:, - # created_at:, - # description:, - # instructions:, - # metadata:, - # model:, - # name:, - # tools:, - # response_format: nil, - # temperature: nil, - # tool_resources: nil, - # top_p: nil, - # object: :assistant, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, description:, instructions:, metadata:, model:, name:, tools:, response_format: nil, temperature: nil, tool_resources: nil, top_p: nil, object: :assistant) + # Represents an `assistant` that can call the model and use tools. + # + # @param id [String] + # @param created_at [Integer] + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String, nil] + # @param tools [Array] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::Assistant::ToolResources, nil] + # @param top_p [Float, nil] + # @param object [Symbol, :assistant] # @see OpenAI::Models::Beta::Assistant#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel @@ -181,18 +160,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. 
The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. + # + # @param code_interpreter [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -208,12 +183,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search @@ -231,12 +202,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 3e3fd1d0..5c2c0fbe 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -124,39 +124,19 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # @param model [String, Symbol, OpenAI::Models::ChatModel] - # # @param description [String, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String, nil] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # model:, - # description: nil, - # instructions: nil, - # metadata: nil, - # name: nil, - # reasoning_effort: nil, - # response_format: nil, - # temperature: nil, - # tool_resources: nil, - # tools: nil, - # top_p: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method 
initialize(model:, description: nil, instructions: nil, metadata: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) + # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] + # @param tools [Array] + # @param top_p [Float, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -171,9 +151,8 @@ module Model # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class ToolResources < OpenAI::Internal::Type::BaseModel @@ -196,18 +175,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
+ # + # @param code_interpreter [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -223,12 +198,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search @@ -260,13 +231,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_stores - # @!parse - # # @param vector_store_ids [Array] - # # @param vector_stores [Array] - # # - # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil, vector_stores: nil) + # @param vector_store_ids [Array] + # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy @@ -304,14 +271,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) + # @param chunking_strategy [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -336,15 +299,11 @@ class Auto < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. 
+ # + # @param type [Symbol, :auto] end class Static < OpenAI::Internal::Type::BaseModel @@ -360,13 +319,9 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param type [Symbol, :static] # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -385,19 +340,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index aed0abc7..df76595d 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -9,12 +9,8 @@ class AssistantDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/assistant_deleted.rb b/lib/openai/models/beta/assistant_deleted.rb index 61cba341..9183aceb 100644 --- a/lib/openai/models/beta/assistant_deleted.rb +++ b/lib/openai/models/beta/assistant_deleted.rb @@ -20,14 +20,10 @@ class AssistantDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"assistant.deleted"] required :object, const: :"assistant.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"assistant.deleted"] - # # - # def initialize(id:, deleted:, object: :"assistant.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, 
deleted:, object: :"assistant.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"assistant.deleted"] end end end diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index d46562ae..c35334fc 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -57,16 +57,12 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -76,11 +72,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/assistant_response_format_option.rb b/lib/openai/models/beta/assistant_response_format_option.rb index f1e5fc86..8c471773 100644 --- a/lib/openai/models/beta/assistant_response_format_option.rb +++ b/lib/openai/models/beta/assistant_response_format_option.rb @@ -42,9 +42,8 @@ module AssistantResponseFormatOption # Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
variant -> { OpenAI::Models::ResponseFormatJSONSchema } - # @!parse - # # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema)] end end end diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index 1a86f690..6bb8b075 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -9,12 +9,8 @@ class AssistantRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index 5663b14f..e8f7fefa 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -132,18 +132,14 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :enabled - # @!parse - # # Occurs when a new - # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # # created. - # # - # # @param data [OpenAI::Models::Beta::Thread] - # # @param enabled [Boolean] - # # @param event [Symbol, :"thread.created"] - # # - # def initialize(data:, enabled: nil, event: :"thread.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, enabled: nil, event: :"thread.created") + # Occurs when a new + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. + # + # @param data [OpenAI::Models::Beta::Thread] + # @param enabled [Boolean] + # @param event [Symbol, :"thread.created"] end class ThreadRunCreated < OpenAI::Internal::Type::BaseModel @@ -159,16 +155,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.created"] required :event, const: :"thread.run.created" - # @!parse - # # Occurs when a new - # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.created"] - # # - # def initialize(data:, event: :"thread.run.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.created") + # Occurs when a new + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.created"] end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel @@ -184,16 +176,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.queued"] required :event, const: :"thread.run.queued" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `queued` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.queued"] - # # - # def initialize(data:, event: :"thread.run.queued", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.queued") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `queued` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.queued"] end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel @@ -209,16 +197,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.in_progress"] required :event, const: :"thread.run.in_progress" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to an `in_progress` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.in_progress"] - # # - # def initialize(data:, event: :"thread.run.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.in_progress") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to an `in_progress` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.in_progress"] end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel @@ -234,16 +218,12 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.requires_action"] required :event, const: :"thread.run.requires_action" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `requires_action` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.requires_action"] - # # - # def initialize(data:, event: :"thread.run.requires_action", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.requires_action") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `requires_action` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.requires_action"] end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel @@ -259,16 +239,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.completed"] required :event, const: :"thread.run.completed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is completed. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.completed"] - # # - # def initialize(data:, event: :"thread.run.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.completed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.completed"] end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel @@ -284,16 +260,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.incomplete"] required :event, const: :"thread.run.incomplete" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # ends with status `incomplete`. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.incomplete"] - # # - # def initialize(data:, event: :"thread.run.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.incomplete") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # ends with status `incomplete`. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.incomplete"] end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel @@ -309,16 +281,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.failed"] required :event, const: :"thread.run.failed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.failed"] - # # - # def initialize(data:, event: :"thread.run.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.failed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # fails. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.failed"] end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel @@ -334,16 +302,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelling"] required :event, const: :"thread.run.cancelling" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `cancelling` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelling"] - # # - # def initialize(data:, event: :"thread.run.cancelling", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelling") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `cancelling` status. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelling"] end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel @@ -359,16 +323,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelled"] required :event, const: :"thread.run.cancelled" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelled"] - # # - # def initialize(data:, event: :"thread.run.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelled") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelled"] end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel @@ -384,16 +344,12 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.expired"] required :event, const: :"thread.run.expired" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # expires. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.expired"] - # # - # def initialize(data:, event: :"thread.run.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.expired") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # expires. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.expired"] end class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel @@ -408,17 +364,13 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.created"] required :event, const: :"thread.run.step.created" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.created"] - # # - # def initialize(data:, event: :"thread.run.step.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.created") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.created"] end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel @@ -433,17 +385,13 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.in_progress"] required :event, const: :"thread.run.step.in_progress" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # moves to an `in_progress` state. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.in_progress"] - # # - # def initialize(data:, event: :"thread.run.step.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.in_progress") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.in_progress"] end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel @@ -459,17 +407,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.delta"] required :event, const: :"thread.run.step.delta" - # @!parse - # # Occurs when parts of a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # are being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] - # # @param event [Symbol, :"thread.run.step.delta"] - # # - # def initialize(data:, event: :"thread.run.step.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.delta") + # Occurs when parts of a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] + # @param event [Symbol, :"thread.run.step.delta"] end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel @@ -484,17 +428,13 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.completed"] required :event, const: :"thread.run.step.completed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.completed"] - # # - # def initialize(data:, event: :"thread.run.step.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.completed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.completed"] end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel @@ -509,17 +449,13 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.failed"] required :event, const: :"thread.run.step.failed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.failed"] - # # - # def initialize(data:, event: :"thread.run.step.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.failed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.failed"] end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel @@ -534,17 +470,13 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.cancelled"] required :event, const: :"thread.run.step.cancelled" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.cancelled"] - # # - # def initialize(data:, event: :"thread.run.step.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.cancelled") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.cancelled"] end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel @@ -559,17 +491,13 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.expired"] required :event, const: :"thread.run.step.expired" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # expires. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.expired"] - # # - # def initialize(data:, event: :"thread.run.step.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.expired") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.expired"] end class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel @@ -585,17 +513,13 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.created"] required :event, const: :"thread.message.created" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.created"] - # # - # def initialize(data:, event: :"thread.message.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.created") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.created"] end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel @@ -611,17 +535,13 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.in_progress"] required :event, const: :"thread.message.in_progress" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # # to an `in_progress` state. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.in_progress"] - # # - # def initialize(data:, event: :"thread.message.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.in_progress") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.in_progress"] end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel @@ -637,17 +557,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.delta"] required :event, const: :"thread.message.delta" - # @!parse - # # Occurs when parts of a - # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # # being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] - # # @param event [Symbol, :"thread.message.delta"] - # # - # def initialize(data:, event: :"thread.message.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.delta") + # Occurs when parts of a + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] + # @param event [Symbol, :"thread.message.delta"] end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel @@ -663,17 +579,13 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.completed"] required :event, const: :"thread.message.completed" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.completed"] - # # - # def initialize(data:, event: :"thread.message.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.completed") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.completed"] end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel @@ -689,17 +601,13 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.incomplete"] required :event, const: :"thread.message.incomplete" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # # before it is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.incomplete"] - # # - # def initialize(data:, event: :"thread.message.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.incomplete") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.incomplete"] end class ErrorEvent < OpenAI::Internal::Type::BaseModel @@ -713,22 +621,17 @@ class ErrorEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :error] required :event, const: :error - # @!parse - # # Occurs when an - # # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. - # # This can happen due to an internal server error or a timeout. - # # - # # @param data [OpenAI::Models::ErrorObject] - # # @param event [Symbol, :error] - # # - # def initialize(data:, event: :error, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :error) + # Occurs when an + # [error](https://platform.openai.com/docs/guides/error-codes#api-errors) occurs. + # This can happen due to an internal server error or a timeout. + # + # @param data [OpenAI::Models::ErrorObject] + # @param event [Symbol, :error] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::AssistantStreamEvent::ThreadCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepInProgress, 
OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::AssistantStreamEvent::ThreadRunStepExpired, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::AssistantStreamEvent::ThreadMessageIncomplete, OpenAI::Models::Beta::AssistantStreamEvent::ErrorEvent)] end end end diff --git a/lib/openai/models/beta/assistant_tool.rb b/lib/openai/models/beta/assistant_tool.rb index 0921b4bb..0272eb94 100644 --- a/lib/openai/models/beta/assistant_tool.rb +++ b/lib/openai/models/beta/assistant_tool.rb @@ -14,9 +14,8 @@ module AssistantTool variant :function, -> { OpenAI::Models::Beta::FunctionTool } - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 1dff1877..21e29156 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -19,16 +19,12 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction] # attr_writer :function - # @!parse - # # Specifies a tool the model should use. Use to force the model to call a specific - # # tool. - # # - # # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] - # # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] - # # - # def initialize(type:, function: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, function: nil) + # Specifies a tool the model should use. Use to force the model to call a specific + # tool. + # + # @param type [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] + # @param function [OpenAI::Models::Beta::AssistantToolChoiceFunction] # The type of the tool. If type is `function`, the function name must be set # @@ -40,11 +36,8 @@ module Type CODE_INTERPRETER = :code_interpreter FILE_SEARCH = :file_search - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_function.rb b/lib/openai/models/beta/assistant_tool_choice_function.rb index 19ca8d48..87065a84 100644 --- a/lib/openai/models/beta/assistant_tool_choice_function.rb +++ b/lib/openai/models/beta/assistant_tool_choice_function.rb @@ -10,12 +10,8 @@ class AssistantToolChoiceFunction < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # @param name [String] - # # - # def initialize(name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:) + # @param name [String] end end end diff --git a/lib/openai/models/beta/assistant_tool_choice_option.rb b/lib/openai/models/beta/assistant_tool_choice_option.rb index 69293cbc..f9d205e3 100644 --- a/lib/openai/models/beta/assistant_tool_choice_option.rb +++ b/lib/openai/models/beta/assistant_tool_choice_option.rb @@ -30,16 +30,12 @@ module Auto AUTO = :auto REQUIRED = :required - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice)] end end end diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index e740ca15..6cb2ed05 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -128,39 +128,19 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # @param description [String, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] - # # @param name [String, nil] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # description: nil, - # instructions: nil, - # metadata: nil, - # model: nil, - # name: nil, - # reasoning_effort: nil, - # response_format: nil, - # temperature: nil, - # tool_resources: nil, - # tools: nil, - # top_p: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(description: nil, instructions: nil, metadata: nil, model: nil, name: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_resources: nil, tools: nil, top_p: nil, request_options: {}) + # @param description [String, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, 
OpenAI::Models::Beta::AssistantUpdateParams::Model] + # @param name [String, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_resources [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] + # @param tools [Array] + # @param top_p [Float, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -244,9 +224,8 @@ module Model variant const: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model::GPT_3_5_TURBO_16K_0613 } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -310,18 +289,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
+ # + # @param code_interpreter [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -338,12 +313,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search @@ -361,12 +332,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/code_interpreter_tool.rb b/lib/openai/models/beta/code_interpreter_tool.rb index 3f3013ad..ffea0d31 100644 --- a/lib/openai/models/beta/code_interpreter_tool.rb +++ b/lib/openai/models/beta/code_interpreter_tool.rb @@ -10,12 +10,8 @@ class CodeInterpreterTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter - # @!parse - # # @param type [Symbol, :code_interpreter] - # # - # def initialize(type: :code_interpreter, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :code_interpreter) + # @param type [Symbol, :code_interpreter] end end end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index 76343e2f..b9baee3e 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -20,13 +20,9 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch] # attr_writer :file_search - # @!parse - # # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] - # # @param type [Symbol, :file_search] - # # - # def initialize(file_search: nil, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_search: nil, type: :file_search) + # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] + # @param type [Symbol, :file_search] # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel @@ -62,15 +58,11 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] # attr_writer :ranking_options - # @!parse - # # Overrides for the file search tool. - # # - # # @param max_num_results [Integer] - # # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] - # # - # def initialize(max_num_results: nil, ranking_options: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(max_num_results: nil, ranking_options: nil) + # Overrides for the file search tool. 
+ # + # @param max_num_results [Integer] + # @param ranking_options [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] # @see OpenAI::Models::Beta::FileSearchTool::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -92,20 +84,16 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] # attr_writer :ranker - # @!parse - # # The ranking options for the file search. If not specified, the file search tool - # # will use the `auto` ranker and a score_threshold of 0. - # # - # # See the - # # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) - # # for more information. - # # - # # @param score_threshold [Float] - # # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] - # # - # def initialize(score_threshold:, ranker: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(score_threshold:, ranker: nil) + # The ranking options for the file search. If not specified, the file search tool + # will use the `auto` ranker and a score_threshold of 0. + # + # See the + # [file search tool documentation](https://platform.openai.com/docs/assistants/tools/file-search#customizing-file-search-settings) + # for more information. + # + # @param score_threshold [Float] + # @param ranker [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] # The ranker to use for the file search. If not specified will use the `auto` # ranker. @@ -117,11 +105,8 @@ module Ranker AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/function_tool.rb b/lib/openai/models/beta/function_tool.rb index ed879754..bce8c29a 100644 --- a/lib/openai/models/beta/function_tool.rb +++ b/lib/openai/models/beta/function_tool.rb @@ -15,13 +15,9 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param function [OpenAI::Models::FunctionDefinition] - # # @param type [Symbol, :function] - # # - # def initialize(function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(function:, type: :function) + # @param function [OpenAI::Models::FunctionDefinition] + # @param type [Symbol, :function] end end end diff --git a/lib/openai/models/beta/message_stream_event.rb b/lib/openai/models/beta/message_stream_event.rb index 09590507..74bb507a 100644 --- a/lib/openai/models/beta/message_stream_event.rb +++ b/lib/openai/models/beta/message_stream_event.rb @@ -42,17 +42,13 @@ class ThreadMessageCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.created"] required :event, const: :"thread.message.created" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # created. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.created"] - # # - # def initialize(data:, event: :"thread.message.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.created") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # created. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.created"] end class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel @@ -68,17 +64,13 @@ class ThreadMessageInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.in_progress"] required :event, const: :"thread.message.in_progress" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) moves - # # to an `in_progress` state. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.in_progress"] - # # - # def initialize(data:, event: :"thread.message.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.in_progress") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) moves + # to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.in_progress"] end class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel @@ -94,17 +86,13 @@ class ThreadMessageDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.delta"] required :event, const: :"thread.message.delta" - # @!parse - # # Occurs when parts of a - # # [Message](https://platform.openai.com/docs/api-reference/messages/object) are - # # being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] - # # @param event [Symbol, :"thread.message.delta"] - # # - # def initialize(data:, event: :"thread.message.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.delta") + # Occurs when parts of a + # [Message](https://platform.openai.com/docs/api-reference/messages/object) are + # being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::MessageDeltaEvent] + # @param event [Symbol, :"thread.message.delta"] end class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel @@ -120,17 +108,13 @@ class ThreadMessageCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.completed"] required :event, const: :"thread.message.completed" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) is - # # completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.completed"] - # # - # def initialize(data:, event: :"thread.message.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.completed") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) is + # completed. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.completed"] end class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel @@ -146,22 +130,17 @@ class ThreadMessageIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.incomplete"] required :event, const: :"thread.message.incomplete" - # @!parse - # # Occurs when a - # # [message](https://platform.openai.com/docs/api-reference/messages/object) ends - # # before it is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Message] - # # @param event [Symbol, :"thread.message.incomplete"] - # # - # def initialize(data:, event: :"thread.message.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.message.incomplete") + # Occurs when a + # [message](https://platform.openai.com/docs/api-reference/messages/object) ends + # before it is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Message] + # @param event [Symbol, :"thread.message.incomplete"] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCreated, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageInProgress, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageDelta, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageCompleted, OpenAI::Models::Beta::MessageStreamEvent::ThreadMessageIncomplete)] end end end diff --git a/lib/openai/models/beta/run_step_stream_event.rb b/lib/openai/models/beta/run_step_stream_event.rb index 444add26..e312bf45 100644 --- a/lib/openai/models/beta/run_step_stream_event.rb +++ b/lib/openai/models/beta/run_step_stream_event.rb @@ -47,17 +47,13 @@ class ThreadRunStepCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.created"] required :event, const: :"thread.run.step.created" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.created"] - # # - # def initialize(data:, event: :"thread.run.step.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.created") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is created. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.created"] end class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel @@ -72,17 +68,13 @@ class ThreadRunStepInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.in_progress"] required :event, const: :"thread.run.step.in_progress" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # moves to an `in_progress` state. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.in_progress"] - # # - # def initialize(data:, event: :"thread.run.step.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.in_progress") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # moves to an `in_progress` state. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.in_progress"] end class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel @@ -98,17 +90,13 @@ class ThreadRunStepDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.delta"] required :event, const: :"thread.run.step.delta" - # @!parse - # # Occurs when parts of a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # are being streamed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] - # # @param event [Symbol, :"thread.run.step.delta"] - # # - # def initialize(data:, event: :"thread.run.step.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.delta") + # Occurs when parts of a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # are being streamed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaEvent] + # @param event [Symbol, :"thread.run.step.delta"] end class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel @@ -123,17 +111,13 @@ class ThreadRunStepCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.completed"] required :event, const: :"thread.run.step.completed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.completed"] - # # - # def initialize(data:, event: :"thread.run.step.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.completed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.completed"] end class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel @@ -148,17 +132,13 @@ class ThreadRunStepFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.failed"] required :event, const: :"thread.run.step.failed" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.failed"] - # # - # def initialize(data:, event: :"thread.run.step.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.failed") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # fails. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.failed"] end class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel @@ -173,17 +153,13 @@ class ThreadRunStepCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.cancelled"] required :event, const: :"thread.run.step.cancelled" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.cancelled"] - # # - # def initialize(data:, event: :"thread.run.step.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.cancelled") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.cancelled"] end class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel @@ -198,22 +174,17 @@ class ThreadRunStepExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.expired"] required :event, const: :"thread.run.step.expired" - # @!parse - # # Occurs when a - # # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) - # # expires. - # # - # # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] - # # @param event [Symbol, :"thread.run.step.expired"] - # # - # def initialize(data:, event: :"thread.run.step.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.step.expired") + # Occurs when a + # [run step](https://platform.openai.com/docs/api-reference/run-steps/step-object) + # expires. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Runs::RunStep] + # @param event [Symbol, :"thread.run.step.expired"] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCreated, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepInProgress, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepDelta, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCompleted, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepFailed, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepCancelled, OpenAI::Models::Beta::RunStepStreamEvent::ThreadRunStepExpired)] end end end diff --git a/lib/openai/models/beta/run_stream_event.rb b/lib/openai/models/beta/run_stream_event.rb index 90552346..33a63272 100644 --- a/lib/openai/models/beta/run_stream_event.rb +++ b/lib/openai/models/beta/run_stream_event.rb @@ -54,16 +54,12 @@ class ThreadRunCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.created"] required :event, const: :"thread.run.created" - # @!parse - # # Occurs when a new - # # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.created"] - # # - # def initialize(data:, event: :"thread.run.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.created") + # Occurs when a new + # [run](https://platform.openai.com/docs/api-reference/runs/object) is created. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.created"] end class ThreadRunQueued < OpenAI::Internal::Type::BaseModel @@ -79,16 +75,12 @@ class ThreadRunQueued < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.queued"] required :event, const: :"thread.run.queued" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `queued` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.queued"] - # # - # def initialize(data:, event: :"thread.run.queued", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.queued") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `queued` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.queued"] end class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel @@ -104,16 +96,12 @@ class ThreadRunInProgress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.in_progress"] required :event, const: :"thread.run.in_progress" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to an `in_progress` status. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.in_progress"] - # # - # def initialize(data:, event: :"thread.run.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.in_progress") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to an `in_progress` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.in_progress"] end class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel @@ -129,16 +117,12 @@ class ThreadRunRequiresAction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.requires_action"] required :event, const: :"thread.run.requires_action" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `requires_action` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.requires_action"] - # # - # def initialize(data:, event: :"thread.run.requires_action", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.requires_action") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `requires_action` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.requires_action"] end class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel @@ -154,16 +138,12 @@ class ThreadRunCompleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.completed"] required :event, const: :"thread.run.completed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is completed. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.completed"] - # # - # def initialize(data:, event: :"thread.run.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.completed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is completed. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.completed"] end class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel @@ -179,16 +159,12 @@ class ThreadRunIncomplete < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.incomplete"] required :event, const: :"thread.run.incomplete" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # ends with status `incomplete`. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.incomplete"] - # # - # def initialize(data:, event: :"thread.run.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.incomplete") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # ends with status `incomplete`. 
+ # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.incomplete"] end class ThreadRunFailed < OpenAI::Internal::Type::BaseModel @@ -204,16 +180,12 @@ class ThreadRunFailed < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.failed"] required :event, const: :"thread.run.failed" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # fails. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.failed"] - # # - # def initialize(data:, event: :"thread.run.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.failed") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # fails. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.failed"] end class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel @@ -229,16 +201,12 @@ class ThreadRunCancelling < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelling"] required :event, const: :"thread.run.cancelling" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # moves to a `cancelling` status. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelling"] - # # - # def initialize(data:, event: :"thread.run.cancelling", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelling") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # moves to a `cancelling` status. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelling"] end class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel @@ -254,16 +222,12 @@ class ThreadRunCancelled < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.cancelled"] required :event, const: :"thread.run.cancelled" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # is cancelled. - # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.cancelled"] - # # - # def initialize(data:, event: :"thread.run.cancelled", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.cancelled") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # is cancelled. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.cancelled"] end class ThreadRunExpired < OpenAI::Internal::Type::BaseModel @@ -279,21 +243,16 @@ class ThreadRunExpired < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.expired"] required :event, const: :"thread.run.expired" - # @!parse - # # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) - # # expires. 
- # # - # # @param data [OpenAI::Models::Beta::Threads::Run] - # # @param event [Symbol, :"thread.run.expired"] - # # - # def initialize(data:, event: :"thread.run.expired", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, event: :"thread.run.expired") + # Occurs when a [run](https://platform.openai.com/docs/api-reference/runs/object) + # expires. + # + # @param data [OpenAI::Models::Beta::Threads::Run] + # @param event [Symbol, :"thread.run.expired"] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::RunStreamEvent::ThreadRunCreated, OpenAI::Models::Beta::RunStreamEvent::ThreadRunQueued, OpenAI::Models::Beta::RunStreamEvent::ThreadRunInProgress, OpenAI::Models::Beta::RunStreamEvent::ThreadRunRequiresAction, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCompleted, OpenAI::Models::Beta::RunStreamEvent::ThreadRunIncomplete, OpenAI::Models::Beta::RunStreamEvent::ThreadRunFailed, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelling, OpenAI::Models::Beta::RunStreamEvent::ThreadRunCancelled, OpenAI::Models::Beta::RunStreamEvent::ThreadRunExpired)] end end end diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index 5f08e2ff..bdb79d97 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -43,19 +43,15 @@ class Thread < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::Thread::ToolResources, nil] required :tool_resources, -> { OpenAI::Models::Beta::Thread::ToolResources }, nil?: true - # @!parse - # # Represents a thread that contains - # # [messages](https://platform.openai.com/docs/api-reference/messages). - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] - # # @param object [Symbol, :thread] - # # - # def initialize(id:, created_at:, metadata:, tool_resources:, object: :thread, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, metadata:, tool_resources:, object: :thread) + # Represents a thread that contains + # [messages](https://platform.openai.com/docs/api-reference/messages). + # + # @param id [String] + # @param created_at [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::Thread::ToolResources, nil] + # @param object [Symbol, :thread] # @see OpenAI::Models::Beta::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel @@ -77,18 +73,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. 
The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. + # + # @param code_interpreter [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -104,12 +96,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::Thread::ToolResources#file_search @@ -127,12 +115,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index 4612ebd8..c87d75f8 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -171,47 +171,23 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy }, nil?: true - # @!parse - # # @param assistant_id [String] - # # @param instructions [String, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_prompt_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # # @param parallel_tool_calls [Boolean] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] - # # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] - # # @param tools [Array, nil] - # # @param top_p [Float, nil] - # # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] - # # @param request_options [OpenAI::RequestOptions, 
Hash{Symbol=>Object}] - # # - # def initialize( - # assistant_id:, - # instructions: nil, - # max_completion_tokens: nil, - # max_prompt_tokens: nil, - # metadata: nil, - # model: nil, - # parallel_tool_calls: nil, - # response_format: nil, - # temperature: nil, - # thread: nil, - # tool_choice: nil, - # tool_resources: nil, - # tools: nil, - # top_p: nil, - # truncation_strategy: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(assistant_id:, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, response_format: nil, temperature: nil, thread: nil, tool_choice: nil, tool_resources: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @param assistant_id [String] + # @param instructions [String, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] + # @param parallel_tool_calls [Boolean] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param thread [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources, nil] + # @param tools [Array, nil] + # @param top_p [Float, nil] + # @param truncation_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to # be used to execute this run. If a value is provided here, it will override the @@ -225,9 +201,8 @@ module Model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class Thread < OpenAI::Internal::Type::BaseModel @@ -265,17 +240,13 @@ class Thread < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources }, nil?: true - # @!parse - # # Options to create a new thread. If no thread is provided when running a request, - # # an empty thread will be created. - # # - # # @param messages [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] - # # - # def initialize(messages: nil, metadata: nil, tool_resources: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(messages: nil, metadata: nil, tool_resources: nil) + # Options to create a new thread. If no thread is provided when running a request, + # an empty thread will be created. 
+ # + # @param messages [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources, nil] class Message < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -314,15 +285,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] # The text contents of the message. # @@ -336,9 +303,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -358,11 +324,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -387,13 +350,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -412,17 +371,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool::FileSearch)] end end end @@ -449,18 +403,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. 
+ # + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -476,12 +426,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search @@ -513,13 +459,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_stores - # @!parse - # # @param vector_store_ids [Array] - # # @param vector_stores [Array] - # # - # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil, vector_stores: nil) + # @param vector_store_ids [Array] + # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy @@ -557,14 +499,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -589,15 +527,11 @@ class Auto < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. 
+ # + # @param type [Symbol, :auto] end class Static < OpenAI::Internal::Type::BaseModel @@ -613,13 +547,9 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param type [Symbol, :static] # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -638,19 +568,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end @@ -677,18 +602,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are used by the assistant's tools. The resources are - # # specific to the type of tool. For example, the `code_interpreter` tool requires - # # a list of file IDs, while the `file_search` tool requires a list of vector store - # # IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are used by the assistant's tools. The resources are + # specific to the type of tool. For example, the `code_interpreter` tool requires + # a list of file IDs, while the `file_search` tool requires a list of vector store + # IDs. 
+ # + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -704,12 +625,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search @@ -727,12 +644,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end @@ -745,9 +658,8 @@ module Tool variant -> { OpenAI::Models::Beta::FunctionTool } - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::FileSearchTool, OpenAI::Models::Beta::FunctionTool)] end class TruncationStrategy < OpenAI::Internal::Type::BaseModel @@ -767,16 +679,12 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :last_messages, Integer, nil?: true - # @!parse - # # Controls for how a thread will be truncated prior to the run. Use this to - # # control the intial context window of the run. - # # - # # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] - # # @param last_messages [Integer, nil] - # # - # def initialize(type:, last_messages: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, last_messages: nil) + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::ThreadCreateAndRunParams::TruncationStrategy::Type] + # @param last_messages [Integer, nil] # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -790,11 +698,8 @@ module Type AUTO = :auto LAST_MESSAGES = :last_messages - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 08818856..85033056 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -41,15 +41,11 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources }, nil?: true - # @!parse - # # @param messages [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(messages: nil, metadata: nil, tool_resources: nil, request_options: {}) + # @param messages [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadCreateParams::ToolResources, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class Message < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -88,15 +84,11 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::ThreadCreateParams::Message::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] # The text contents of the message. # @@ -110,9 +102,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::ThreadCreateParams::Message::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -132,11 +123,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -161,13 +149,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -186,17 +170,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool::FileSearch)] end end end @@ -221,18 +200,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. 
+ # + # @param code_interpreter [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -248,12 +223,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search @@ -285,13 +256,9 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_stores - # @!parse - # # @param vector_store_ids [Array] - # # @param vector_stores [Array] - # # - # def initialize(vector_store_ids: nil, vector_stores: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil, vector_stores: nil) + # @param vector_store_ids [Array] + # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel # @!attribute [r] chunking_strategy @@ -329,14 +296,10 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(chunking_strategy: nil, file_ids: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, file_ids: nil, metadata: nil) + # @param chunking_strategy [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. @@ -361,15 +324,11 @@ class Auto < OpenAI::Internal::Type::BaseModel # @return [Symbol, :auto] required :type, const: :auto - # @!parse - # # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of - # # `800` and `chunk_overlap_tokens` of `400`. - # # - # # @param type [Symbol, :auto] - # # - # def initialize(type: :auto, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :auto) + # The default strategy. This strategy currently uses a `max_chunk_size_tokens` of + # `800` and `chunk_overlap_tokens` of `400`. 
+ # + # @param type [Symbol, :auto] end class Static < OpenAI::Internal::Type::BaseModel @@ -385,13 +344,9 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static::Static] + # @param type [Symbol, :static] # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static#static class Static < OpenAI::Internal::Type::BaseModel @@ -410,19 +365,14 @@ class Static < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static)] end end end diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index aa6b5341..2071367c 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -9,12 +9,8 @@ class ThreadDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/thread_deleted.rb b/lib/openai/models/beta/thread_deleted.rb index 6bd4b0db..862e25fc 100644 --- a/lib/openai/models/beta/thread_deleted.rb +++ b/lib/openai/models/beta/thread_deleted.rb @@ -20,14 +20,10 @@ class ThreadDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.deleted"] required :object, const: :"thread.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"thread.deleted"] - # # - # def initialize(id:, deleted:, object: :"thread.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"thread.deleted") + # @param id [String] + # 
@param deleted [Boolean] + # @param object [Symbol, :"thread.deleted"] end end end diff --git a/lib/openai/models/beta/thread_retrieve_params.rb b/lib/openai/models/beta/thread_retrieve_params.rb index db2128ba..663f5488 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -9,12 +9,8 @@ class ThreadRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index e67ecf5f..d7dee4f7 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -26,18 +26,14 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :enabled - # @!parse - # # Occurs when a new - # # [thread](https://platform.openai.com/docs/api-reference/threads/object) is - # # created. - # # - # # @param data [OpenAI::Models::Beta::Thread] - # # @param enabled [Boolean] - # # @param event [Symbol, :"thread.created"] - # # - # def initialize(data:, enabled: nil, event: :"thread.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, enabled: nil, event: :"thread.created") + # Occurs when a new + # [thread](https://platform.openai.com/docs/api-reference/threads/object) is + # created. 
+ # + # @param data [OpenAI::Models::Beta::Thread] + # @param enabled [Boolean] + # @param event [Symbol, :"thread.created"] end end end diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 07938465..97527fbf 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -29,14 +29,10 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources }, nil?: true - # @!parse - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(metadata: nil, tool_resources: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata: nil, tool_resources: nil, request_options: {}) + # @param metadata [Hash{Symbol=>String}, nil] + # @param tool_resources [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel # @!attribute [r] code_interpreter @@ -58,18 +54,14 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] # attr_writer :file_search - # @!parse - # # A set of resources that are made available to the assistant's tools in this - # # thread. The resources are specific to the type of tool. For example, the - # # `code_interpreter` tool requires a list of file IDs, while the `file_search` - # # tool requires a list of vector store IDs. - # # - # # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] - # # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] - # # - # def initialize(code_interpreter: nil, file_search: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter: nil, file_search: nil) + # A set of resources that are made available to the assistant's tools in this + # thread. The resources are specific to the type of tool. For example, the + # `code_interpreter` tool requires a list of file IDs, while the `file_search` + # tool requires a list of vector store IDs. 
+ # + # @param code_interpreter [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] + # @param file_search [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -85,12 +77,8 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :file_ids - # @!parse - # # @param file_ids [Array] - # # - # def initialize(file_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids: nil) + # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search @@ -108,12 +96,8 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :vector_store_ids - # @!parse - # # @param vector_store_ids [Array] - # # - # def initialize(vector_store_ids: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids: nil) + # @param vector_store_ids [Array] end end end diff --git a/lib/openai/models/beta/threads/annotation.rb b/lib/openai/models/beta/threads/annotation.rb index 10511e12..b5adaf6d 100644 --- a/lib/openai/models/beta/threads/annotation.rb +++ b/lib/openai/models/beta/threads/annotation.rb @@ -18,9 +18,8 @@ module Annotation # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathAnnotation } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationAnnotation, OpenAI::Models::Beta::Threads::FilePathAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/annotation_delta.rb b/lib/openai/models/beta/threads/annotation_delta.rb index 726c91ad..c6ed7bc1 100644 --- a/lib/openai/models/beta/threads/annotation_delta.rb +++ b/lib/openai/models/beta/threads/annotation_delta.rb @@ -18,9 +18,8 @@ module AnnotationDelta # A URL for the file that's generated when the assistant used the `code_interpreter` tool to generate a file. variant :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation, OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation)] end end end diff --git a/lib/openai/models/beta/threads/file_citation_annotation.rb b/lib/openai/models/beta/threads/file_citation_annotation.rb index cf4eab2d..77b9e19d 100644 --- a/lib/openai/models/beta/threads/file_citation_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_annotation.rb @@ -32,20 +32,16 @@ class FileCitationAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!parse - # # A citation within the message that points to a specific quote from a specific - # # File associated with the assistant or the message. Generated when the assistant - # # uses the "file_search" tool to search files. 
- # # - # # @param end_index [Integer] - # # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_citation] - # # - # def initialize(end_index:, file_citation:, start_index:, text:, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, file_citation:, start_index:, text:, type: :file_citation) + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. + # + # @param end_index [Integer] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationAnnotation::FileCitation] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_citation] # @see OpenAI::Models::Beta::Threads::FileCitationAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel @@ -55,12 +51,8 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index d1ac99c2..66b0623a 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -54,21 +54,17 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :text - # @!parse - # # A citation within the message that points to a specific quote from a specific - # # File associated with the assistant or the message. Generated when the assistant - # # uses the "file_search" tool to search files. - # # - # # @param index [Integer] - # # @param end_index [Integer] - # # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_citation] - # # - # def initialize(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation) + # A citation within the message that points to a specific quote from a specific + # File associated with the assistant or the message. Generated when the assistant + # uses the "file_search" tool to search files. 
+ # + # @param index [Integer] + # @param end_index [Integer] + # @param file_citation [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_citation] # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel @@ -92,13 +88,9 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :quote - # @!parse - # # @param file_id [String] - # # @param quote [String] - # # - # def initialize(file_id: nil, quote: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, quote: nil) + # @param file_id [String] + # @param quote [String] end end end diff --git a/lib/openai/models/beta/threads/file_path_annotation.rb b/lib/openai/models/beta/threads/file_path_annotation.rb index c1a51ef6..91c2a6fd 100644 --- a/lib/openai/models/beta/threads/file_path_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_annotation.rb @@ -32,19 +32,15 @@ class FilePathAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!parse - # # A URL for the file that's generated when the assistant used the - # # `code_interpreter` tool to generate a file. - # # - # # @param end_index [Integer] - # # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_path] - # # - # def initialize(end_index:, file_path:, start_index:, text:, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, file_path:, start_index:, text:, type: :file_path) + # A URL for the file that's generated when the assistant used the + # `code_interpreter` tool to generate a file. + # + # @param end_index [Integer] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathAnnotation::FilePath] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_path] # @see OpenAI::Models::Beta::Threads::FilePathAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel @@ -54,12 +50,8 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 975bd449..065e7eab 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -54,20 +54,16 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :text - # @!parse - # # A URL for the file that's generated when the assistant used the - # # `code_interpreter` tool to generate a file. 
- # # - # # @param index [Integer] - # # @param end_index [Integer] - # # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] - # # @param start_index [Integer] - # # @param text [String] - # # @param type [Symbol, :file_path] - # # - # def initialize(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path) + # A URL for the file that's generated when the assistant used the + # `code_interpreter` tool to generate a file. + # + # @param index [Integer] + # @param end_index [Integer] + # @param file_path [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] + # @param start_index [Integer] + # @param text [String] + # @param type [Symbol, :file_path] # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel @@ -81,12 +77,8 @@ class FilePath < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :file_id - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 631803f6..61b60dc0 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -24,13 +24,9 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] # attr_writer :detail - # @!parse - # # @param file_id [String] - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] - # # - # def initialize(file_id:, detail: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, detail: nil) + # @param file_id [String] + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. @@ -43,11 +39,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_file_content_block.rb b/lib/openai/models/beta/threads/image_file_content_block.rb index 4a5a487e..2bcba265 100644 --- a/lib/openai/models/beta/threads/image_file_content_block.rb +++ b/lib/openai/models/beta/threads/image_file_content_block.rb @@ -16,16 +16,12 @@ class ImageFileContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_file] required :type, const: :image_file - # @!parse - # # References an image [File](https://platform.openai.com/docs/api-reference/files) - # # in the content of a message. 
- # # - # # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] - # # @param type [Symbol, :image_file] - # # - # def initialize(image_file:, type: :image_file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_file:, type: :image_file) + # References an image [File](https://platform.openai.com/docs/api-reference/files) + # in the content of a message. + # + # @param image_file [OpenAI::Models::Beta::Threads::ImageFile] + # @param type [Symbol, :image_file] end end end diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 219cfba9..117dd1c1 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -28,13 +28,9 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :file_id - # @!parse - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] - # # @param file_id [String] - # # - # def initialize(detail: nil, file_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(detail: nil, file_id: nil) + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] + # @param file_id [String] # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. @@ -47,11 +43,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index 9a5625ad..a4abc497 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -26,17 +26,13 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::ImageFileDelta] # attr_writer :image_file - # @!parse - # # References an image [File](https://platform.openai.com/docs/api-reference/files) - # # in the content of a message. - # # - # # @param index [Integer] - # # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] - # # @param type [Symbol, :image_file] - # # - # def initialize(index:, image_file: nil, type: :image_file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, image_file: nil, type: :image_file) + # References an image [File](https://platform.openai.com/docs/api-reference/files) + # in the content of a message. 
+ # + # @param index [Integer] + # @param image_file [OpenAI::Models::Beta::Threads::ImageFileDelta] + # @param type [Symbol, :image_file] end end end diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index a76f8467..14266c31 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -23,13 +23,9 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] # attr_writer :detail - # @!parse - # # @param url [String] - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] - # # - # def initialize(url:, detail: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url:, detail: nil) + # @param url [String] + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. Default value is `auto` @@ -42,11 +38,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_url_content_block.rb b/lib/openai/models/beta/threads/image_url_content_block.rb index 326f1c1d..b44975eb 100644 --- a/lib/openai/models/beta/threads/image_url_content_block.rb +++ b/lib/openai/models/beta/threads/image_url_content_block.rb @@ -16,15 +16,11 @@ class ImageURLContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!parse - # # References an image URL in the content of a message. - # # - # # @param image_url [OpenAI::Models::Beta::Threads::ImageURL] - # # @param type [Symbol, :image_url] - # # - # def initialize(image_url:, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_url:, type: :image_url) + # References an image URL in the content of a message. + # + # @param image_url [OpenAI::Models::Beta::Threads::ImageURL] + # @param type [Symbol, :image_url] end end end diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 4ae3e547..3b2f4eab 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -27,13 +27,9 @@ class ImageURLDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :url - # @!parse - # # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] - # # @param url [String] - # # - # def initialize(detail: nil, url: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(detail: nil, url: nil) + # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] + # @param url [String] # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. @@ -46,11 +42,8 @@ module Detail LOW = :low HIGH = :high - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index 950a87c1..4f7b9a82 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -26,16 +26,12 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::ImageURLDelta] # attr_writer :image_url - # @!parse - # # References an image URL in the content of a message. - # # - # # @param index [Integer] - # # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta] - # # @param type [Symbol, :image_url] - # # - # def initialize(index:, image_url: nil, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, image_url: nil, type: :image_url) + # References an image URL in the content of a message. + # + # @param index [Integer] + # @param image_url [OpenAI::Models::Beta::Threads::ImageURLDelta] + # @param type [Symbol, :image_url] end end end diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 355140de..90cf7aba 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -104,46 +104,24 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # Represents a message within a - # # [thread](https://platform.openai.com/docs/api-reference/threads). - # # - # # @param id [String] - # # @param assistant_id [String, nil] - # # @param attachments [Array, nil] - # # @param completed_at [Integer, nil] - # # @param content [Array] - # # @param created_at [Integer] - # # @param incomplete_at [Integer, nil] - # # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] - # # @param run_id [String, nil] - # # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] - # # @param thread_id [String] - # # @param object [Symbol, :"thread.message"] - # # - # def initialize( - # id:, - # assistant_id:, - # attachments:, - # completed_at:, - # content:, - # created_at:, - # incomplete_at:, - # incomplete_details:, - # metadata:, - # role:, - # run_id:, - # status:, - # thread_id:, - # object: :"thread.message", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, assistant_id:, attachments:, completed_at:, content:, created_at:, incomplete_at:, incomplete_details:, metadata:, role:, run_id:, status:, thread_id:, object: :"thread.message") + # Represents a message within a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ # + # @param id [String] + # @param assistant_id [String, nil] + # @param attachments [Array, nil] + # @param completed_at [Integer, nil] + # @param content [Array] + # @param created_at [Integer] + # @param incomplete_at [Integer, nil] + # @param incomplete_details [OpenAI::Models::Beta::Threads::Message::IncompleteDetails, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param role [Symbol, OpenAI::Models::Beta::Threads::Message::Role] + # @param run_id [String, nil] + # @param status [Symbol, OpenAI::Models::Beta::Threads::Message::Status] + # @param thread_id [String] + # @param object [Symbol, :"thread.message"] class Attachment < OpenAI::Internal::Type::BaseModel # @!attribute [r] file_id @@ -167,13 +145,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -189,17 +163,12 @@ class AssistantToolsFileSearchTypeOnly < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::Message::Attachment::Tool::AssistantToolsFileSearchTypeOnly)] end end @@ -211,14 +180,10 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] required :reason, enum: -> { OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason } - # @!parse - # # On an incomplete message, details about why the message is incomplete. - # # - # # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] - # # - # def initialize(reason:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reason:) + # On an incomplete message, details about why the message is incomplete. + # + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Message::IncompleteDetails::Reason] # The reason the message is incomplete. # @@ -232,11 +197,8 @@ module Reason RUN_EXPIRED = :run_expired RUN_FAILED = :run_failed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -249,11 +211,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The status of the message, which can be either `in_progress`, `incomplete`, or @@ -267,11 +226,8 @@ module Status INCOMPLETE = :incomplete COMPLETED = :completed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/message_content.rb b/lib/openai/models/beta/threads/message_content.rb index b6ed040c..dbc60f62 100644 --- a/lib/openai/models/beta/threads/message_content.rb +++ b/lib/openai/models/beta/threads/message_content.rb @@ -23,9 +23,8 @@ module MessageContent # The refusal content generated by the assistant. variant :refusal, -> { OpenAI::Models::Beta::Threads::RefusalContentBlock } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlock, OpenAI::Models::Beta::Threads::RefusalContentBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_delta.rb b/lib/openai/models/beta/threads/message_content_delta.rb index a69eadda..744d6ed2 100644 --- a/lib/openai/models/beta/threads/message_content_delta.rb +++ b/lib/openai/models/beta/threads/message_content_delta.rb @@ -23,9 +23,8 @@ module MessageContentDelta # References an image URL in the content of a message. variant :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDeltaBlock } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileDeltaBlock, OpenAI::Models::Beta::Threads::TextDeltaBlock, OpenAI::Models::Beta::Threads::RefusalDeltaBlock, OpenAI::Models::Beta::Threads::ImageURLDeltaBlock)] end end end diff --git a/lib/openai/models/beta/threads/message_content_part_param.rb b/lib/openai/models/beta/threads/message_content_part_param.rb index faab6ef1..9310f8bb 100644 --- a/lib/openai/models/beta/threads/message_content_part_param.rb +++ b/lib/openai/models/beta/threads/message_content_part_param.rb @@ -20,9 +20,8 @@ module MessageContentPartParam # The text content that is part of a message. 
variant :text, -> { OpenAI::Models::Beta::Threads::TextContentBlockParam } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::ImageFileContentBlock, OpenAI::Models::Beta::Threads::ImageURLContentBlock, OpenAI::Models::Beta::Threads::TextContentBlockParam)] end end end diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index ea41994f..081a69c8 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -46,16 +46,12 @@ class MessageCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil, request_options: {}) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageCreateParams::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The text contents of the message. module Content @@ -67,9 +63,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::Threads::MessageCreateParams::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -87,11 +82,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -116,13 +108,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -141,17 +129,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool::FileSearch)] end end end diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index 759a3108..3625e9b1 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -15,13 +15,9 @@ class MessageDeleteParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/message_deleted.rb b/lib/openai/models/beta/threads/message_deleted.rb index 07513949..42e56be5 100644 --- a/lib/openai/models/beta/threads/message_deleted.rb +++ b/lib/openai/models/beta/threads/message_deleted.rb @@ -21,14 +21,10 @@ class MessageDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.deleted"] required :object, const: :"thread.message.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"thread.message.deleted"] - # # - # def initialize(id:, deleted:, object: :"thread.message.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"thread.message.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"thread.message.deleted"] end end end diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index b63f29bf..9458a9bc 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -26,15 +26,11 @@ class MessageDelta < OpenAI::Internal::Type::BaseModel # # @return [Symbol, 
OpenAI::Models::Beta::Threads::MessageDelta::Role] # attr_writer :role - # @!parse - # # The delta containing the fields that have changed on the Message. - # # - # # @param content [Array] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] - # # - # def initialize(content: nil, role: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, role: nil) + # The delta containing the fields that have changed on the Message. + # + # @param content [Array] + # @param role [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] # The entity that produced the message. One of `user` or `assistant`. # @@ -45,11 +41,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/message_delta_event.rb b/lib/openai/models/beta/threads/message_delta_event.rb index 0e313696..e935e3bd 100644 --- a/lib/openai/models/beta/threads/message_delta_event.rb +++ b/lib/openai/models/beta/threads/message_delta_event.rb @@ -23,17 +23,13 @@ class MessageDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.message.delta"] required :object, const: :"thread.message.delta" - # @!parse - # # Represents a message delta i.e. any changed fields on a message during - # # streaming. - # # - # # @param id [String] - # # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] - # # @param object [Symbol, :"thread.message.delta"] - # # - # def initialize(id:, delta:, object: :"thread.message.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, delta:, object: :"thread.message.delta") + # Represents a message delta i.e. any changed fields on a message during + # streaming. + # + # @param id [String] + # @param delta [OpenAI::Models::Beta::Threads::MessageDelta] + # @param object [Symbol, :"thread.message.delta"] end end end diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index b30bcf8a..1fffd076 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -68,17 +68,13 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :run_id - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] - # # @param run_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] + # @param run_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -88,11 +84,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index e2bc129b..10e58171 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -15,13 +15,9 @@ class MessageRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index b02d01a6..568cc684 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -26,14 +26,10 @@ class MessageUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param thread_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, metadata: nil, request_options: {}) + # @param thread_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/refusal_content_block.rb b/lib/openai/models/beta/threads/refusal_content_block.rb index 58ab5010..fcdce182 100644 --- a/lib/openai/models/beta/threads/refusal_content_block.rb +++ b/lib/openai/models/beta/threads/refusal_content_block.rb @@ -16,15 +16,11 @@ class RefusalContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!parse - # # The refusal content generated by the assistant. - # # - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(refusal:, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(refusal:, type: :refusal) + # The refusal content generated by the assistant. + # + # @param refusal [String] + # @param type [Symbol, :refusal] end end end diff --git a/lib/openai/models/beta/threads/refusal_delta_block.rb b/lib/openai/models/beta/threads/refusal_delta_block.rb index e762e97d..dcc1bd21 100644 --- a/lib/openai/models/beta/threads/refusal_delta_block.rb +++ b/lib/openai/models/beta/threads/refusal_delta_block.rb @@ -26,16 +26,12 @@ class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :refusal - # @!parse - # # The refusal content that is part of a message. 
- # # - # # @param index [Integer] - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(index:, refusal: nil, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, refusal: nil, type: :refusal) + # The refusal content that is part of a message. + # + # @param index [Integer] + # @param refusal [String] + # @param type [Symbol, :refusal] end end end diff --git a/lib/openai/models/beta/threads/required_action_function_tool_call.rb b/lib/openai/models/beta/threads/required_action_function_tool_call.rb index bd40aadc..7db514ed 100644 --- a/lib/openai/models/beta/threads/required_action_function_tool_call.rb +++ b/lib/openai/models/beta/threads/required_action_function_tool_call.rb @@ -27,16 +27,12 @@ class RequiredActionFunctionToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # Tool call objects - # # - # # @param id [String] - # # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] - # # @param type [Symbol, :function] - # # - # def initialize(id:, function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, function:, type: :function) + # Tool call objects + # + # @param id [String] + # @param function [OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -52,15 +48,11 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # The function definition. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # The function definition. + # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index c6436c0d..fb23679b 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -219,72 +219,37 @@ class Run < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!parse - # # Represents an execution run on a - # # [thread](https://platform.openai.com/docs/api-reference/threads). 
- # # - # # @param id [String] - # # @param assistant_id [String] - # # @param cancelled_at [Integer, nil] - # # @param completed_at [Integer, nil] - # # @param created_at [Integer] - # # @param expires_at [Integer, nil] - # # @param failed_at [Integer, nil] - # # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] - # # @param instructions [String] - # # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_prompt_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param parallel_tool_calls [Boolean] - # # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param started_at [Integer, nil] - # # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] - # # @param thread_id [String] - # # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # # @param tools [Array] - # # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] - # # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] - # # @param temperature [Float, nil] - # # @param top_p [Float, nil] - # # @param object [Symbol, :"thread.run"] - # # - # def initialize( - # id:, - # assistant_id:, - # cancelled_at:, - # completed_at:, - # created_at:, - # expires_at:, - # failed_at:, - # incomplete_details:, - # instructions:, - # last_error:, - # max_completion_tokens:, - # max_prompt_tokens:, - # metadata:, - # model:, - # parallel_tool_calls:, - # required_action:, - # response_format:, - # started_at:, - # status:, - # thread_id:, - # tool_choice:, - # tools:, - # truncation_strategy:, - # usage:, - # temperature: nil, - # top_p: nil, - # object: :"thread.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expires_at:, failed_at:, incomplete_details:, instructions:, last_error:, max_completion_tokens:, max_prompt_tokens:, metadata:, model:, parallel_tool_calls:, required_action:, response_format:, started_at:, status:, thread_id:, tool_choice:, tools:, truncation_strategy:, usage:, temperature: nil, top_p: nil, object: :"thread.run") + # Represents an execution run on a + # [thread](https://platform.openai.com/docs/api-reference/threads). 
+ # + # @param id [String] + # @param assistant_id [String] + # @param cancelled_at [Integer, nil] + # @param completed_at [Integer, nil] + # @param created_at [Integer] + # @param expires_at [Integer, nil] + # @param failed_at [Integer, nil] + # @param incomplete_details [OpenAI::Models::Beta::Threads::Run::IncompleteDetails, nil] + # @param instructions [String] + # @param last_error [OpenAI::Models::Beta::Threads::Run::LastError, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param parallel_tool_calls [Boolean] + # @param required_action [OpenAI::Models::Beta::Threads::Run::RequiredAction, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param started_at [Integer, nil] + # @param status [Symbol, OpenAI::Models::Beta::Threads::RunStatus] + # @param thread_id [String] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tools [Array] + # @param truncation_strategy [OpenAI::Models::Beta::Threads::Run::TruncationStrategy, nil] + # @param usage [OpenAI::Models::Beta::Threads::Run::Usage, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param object [Symbol, :"thread.run"] # @see OpenAI::Models::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel @@ -299,15 +264,11 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] # attr_writer :reason - # @!parse - # # Details on why the run is incomplete. Will be `null` if the run is not - # # incomplete. - # # - # # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] - # # - # def initialize(reason: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reason: nil) + # Details on why the run is incomplete. Will be `null` if the run is not + # incomplete. + # + # @param reason [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. @@ -319,11 +280,8 @@ module Reason MAX_COMPLETION_TOKENS = :max_completion_tokens MAX_PROMPT_TOKENS = :max_prompt_tokens - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -341,15 +299,11 @@ class LastError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # The last error associated with this run. Will be `null` if there are no errors. - # # - # # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # The last error associated with this run. Will be `null` if there are no errors. + # + # @param code [Symbol, OpenAI::Models::Beta::Threads::Run::LastError::Code] + # @param message [String] # One of `server_error`, `rate_limit_exceeded`, or `invalid_prompt`. 
# @@ -361,11 +315,8 @@ module Code RATE_LIMIT_EXCEEDED = :rate_limit_exceeded INVALID_PROMPT = :invalid_prompt - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -384,16 +335,12 @@ class RequiredAction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :submit_tool_outputs] required :type, const: :submit_tool_outputs - # @!parse - # # Details on the action required to continue the run. Will be `null` if no action - # # is required. - # # - # # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] - # # @param type [Symbol, :submit_tool_outputs] - # # - # def initialize(submit_tool_outputs:, type: :submit_tool_outputs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(submit_tool_outputs:, type: :submit_tool_outputs) + # Details on the action required to continue the run. Will be `null` if no action + # is required. + # + # @param submit_tool_outputs [OpenAI::Models::Beta::Threads::Run::RequiredAction::SubmitToolOutputs] + # @param type [Symbol, :submit_tool_outputs] # @see OpenAI::Models::Beta::Threads::Run::RequiredAction#submit_tool_outputs class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel @@ -404,14 +351,10 @@ class SubmitToolOutputs < OpenAI::Internal::Type::BaseModel required :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RequiredActionFunctionToolCall] } - # @!parse - # # Details on the tool outputs needed for this run to continue. - # # - # # @param tool_calls [Array] - # # - # def initialize(tool_calls:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(tool_calls:) + # Details on the tool outputs needed for this run to continue. + # + # @param tool_calls [Array] end end @@ -433,16 +376,12 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :last_messages, Integer, nil?: true - # @!parse - # # Controls for how a thread will be truncated prior to the run. Use this to - # # control the intial context window of the run. - # # - # # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] - # # @param last_messages [Integer, nil] - # # - # def initialize(type:, last_messages: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, last_messages: nil) + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::Threads::Run::TruncationStrategy::Type] + # @param last_messages [Integer, nil] # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -456,11 +395,8 @@ module Type AUTO = :auto LAST_MESSAGES = :last_messages - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -484,17 +420,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Usage statistics related to the run. This value will be `null` if the run is not - # # in a terminal state (i.e. `in_progress`, `queued`, etc.). 
- # # - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:) + # Usage statistics related to the run. This value will be `null` if the run is not + # in a terminal state (i.e. `in_progress`, `queued`, etc.). + # + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index 0e9b76a0..6067a1a4 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -15,13 +15,9 @@ class RunCancelParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index a058e78e..8ae80de8 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -197,51 +197,25 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy }, nil?: true - # @!parse - # # @param assistant_id [String] - # # @param include [Array] - # # @param additional_instructions [String, nil] - # # @param additional_messages [Array, nil] - # # @param instructions [String, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_prompt_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] - # # @param parallel_tool_calls [Boolean] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] - # # @param temperature [Float, nil] - # # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] - # # @param tools [Array, nil] - # # @param top_p [Float, nil] - # # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # assistant_id:, - # include: nil, - # additional_instructions: nil, - # additional_messages: nil, - # instructions: nil, - # max_completion_tokens: nil, - # max_prompt_tokens: nil, - # metadata: nil, - # model: nil, - # parallel_tool_calls: nil, - # reasoning_effort: nil, - # response_format: nil, - # temperature: nil, - # tool_choice: nil, - # tools: nil, - # top_p: nil, - # truncation_strategy: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | 
OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(assistant_id:, include: nil, additional_instructions: nil, additional_messages: nil, instructions: nil, max_completion_tokens: nil, max_prompt_tokens: nil, metadata: nil, model: nil, parallel_tool_calls: nil, reasoning_effort: nil, response_format: nil, temperature: nil, tool_choice: nil, tools: nil, top_p: nil, truncation_strategy: nil, request_options: {}) + # @param assistant_id [String] + # @param include [Array] + # @param additional_instructions [String, nil] + # @param additional_messages [Array, nil] + # @param instructions [String, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_prompt_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, nil] + # @param parallel_tool_calls [Boolean] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [Symbol, :auto, OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONObject, OpenAI::Models::ResponseFormatJSONSchema, nil] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Beta::AssistantToolChoiceOption::Auto, OpenAI::Models::Beta::AssistantToolChoice, nil] + # @param tools [Array, nil] + # @param top_p [Float, nil] + # @param truncation_strategy [OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -281,15 +255,11 @@ class AdditionalMessage < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] - # # @param attachments [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # - # def initialize(content:, role:, attachments: nil, metadata: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, attachments: nil, metadata: nil) + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Role] + # @param attachments [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] # The text contents of the message. # @@ -303,9 +273,8 @@ module Content # An array of content parts with a defined type, each can be of type `text` or images can be passed with `image_url` or `image_file`. Image types are only supported on [Vision-compatible models](https://platform.openai.com/docs/models). variant -> { OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Content::MessageContentPartParamArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] MessageContentPartParamArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Beta::Threads::MessageContentPartParam }] @@ -325,11 +294,8 @@ module Role USER = :user ASSISTANT = :assistant - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Attachment < OpenAI::Internal::Type::BaseModel @@ -354,13 +320,9 @@ class Attachment < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tools - # @!parse - # # @param file_id [String] - # # @param tools [Array] - # # - # def initialize(file_id: nil, tools: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, tools: nil) + # @param file_id [String] + # @param tools [Array] module Tool extend OpenAI::Internal::Type::Union @@ -379,17 +341,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param type [Symbol, :file_search] - # # - # def initialize(type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :file_search) + # @param type [Symbol, :file_search] end - # @!parse - # # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::CodeInterpreterTool, OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool::FileSearch)] end end end @@ -406,9 +363,8 @@ module Model # The ID of the [Model](https://platform.openai.com/docs/api-reference/models) to be used to execute this run. If a value is provided here, it will override the model associated with the assistant. If not, the model associated with the assistant will be used. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end class TruncationStrategy < OpenAI::Internal::Type::BaseModel @@ -428,16 +384,12 @@ class TruncationStrategy < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :last_messages, Integer, nil?: true - # @!parse - # # Controls for how a thread will be truncated prior to the run. Use this to - # # control the intial context window of the run. - # # - # # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] - # # @param last_messages [Integer, nil] - # # - # def initialize(type:, last_messages: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, last_messages: nil) + # Controls for how a thread will be truncated prior to the run. Use this to + # control the intial context window of the run. + # + # @param type [Symbol, OpenAI::Models::Beta::Threads::RunCreateParams::TruncationStrategy::Type] + # @param last_messages [Integer, nil] # The truncation strategy to use for the thread. The default is `auto`. If set to # `last_messages`, the thread will be truncated to the n most recent messages in @@ -451,11 +403,8 @@ module Type AUTO = :auto LAST_MESSAGES = :last_messages - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index 3d4e377b..dd2e424a 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -58,16 +58,12 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -77,11 +73,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index ca7aaf1d..464d303b 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -15,13 +15,9 @@ class RunRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!parse - # # @param thread_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, request_options: {}) + # @param thread_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/run_status.rb b/lib/openai/models/beta/threads/run_status.rb index d17f7dc6..cde93d93 100644 --- a/lib/openai/models/beta/threads/run_status.rb +++ b/lib/openai/models/beta/threads/run_status.rb @@ -20,11 +20,8 @@ module RunStatus INCOMPLETE = :incomplete EXPIRED = :expired - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 2243cb1d..16181aa3 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -24,14 +24,10 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel required :tool_outputs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::RunSubmitToolOutputsParams::ToolOutput] } - # @!parse - # # @param thread_id [String] - # # @param tool_outputs [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, tool_outputs:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, tool_outputs:, request_options: {}) + # @param thread_id [String] + # @param tool_outputs [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolOutput < OpenAI::Internal::Type::BaseModel # @!attribute [r] output @@ -55,13 +51,9 @@ class ToolOutput < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :tool_call_id - # @!parse - # # @param output [String] - # # @param tool_call_id [String] - # # - # def initialize(output: nil, tool_call_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(output: nil, tool_call_id: nil) + # @param output [String] + # @param tool_call_id [String] end end end diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index 68b6536b..ddfe60d0 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -26,14 +26,10 @@ class RunUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param thread_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, metadata: nil, request_options: {}) + # @param thread_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb index 9d329390..e1c7ac7f 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb @@ -28,16 +28,12 @@ class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :logs - # @!parse - # # Text output from the Code Interpreter tool call as part of a run step. 
- # # - # # @param index [Integer] - # # @param logs [String] - # # @param type [Symbol, :logs] - # # - # def initialize(index:, logs: nil, type: :logs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, logs: nil, type: :logs) + # Text output from the Code Interpreter tool call as part of a run step. + # + # @param index [Integer] + # @param logs [String] + # @param type [Symbol, :logs] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index ab77db4e..d46e7d33 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -27,14 +27,10 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] # attr_writer :image - # @!parse - # # @param index [Integer] - # # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] - # # @param type [Symbol, :image] - # # - # def initialize(index:, image: nil, type: :image, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, image: nil, type: :image) + # @param index [Integer] + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] + # @param type [Symbol, :image] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::Internal::Type::BaseModel @@ -49,12 +45,8 @@ class Image < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :file_id - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil) + # @param file_id [String] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb index 60262e17..8c7683ea 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call.rb @@ -26,16 +26,12 @@ class CodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter - # @!parse - # # Details of the Code Interpreter tool call the run step was involved in. - # # - # # @param id [String] - # # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] - # # @param type [Symbol, :code_interpreter] - # # - # def initialize(id:, code_interpreter:, type: :code_interpreter, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code_interpreter:, type: :code_interpreter) + # Details of the Code Interpreter tool call the run step was involved in. 
+ # + # @param id [String] + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter] + # @param type [Symbol, :code_interpreter] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -54,15 +50,11 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel required :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output] } - # @!parse - # # The Code Interpreter tool call definition. - # # - # # @param input [String] - # # @param outputs [Array] - # # - # def initialize(input:, outputs:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, outputs:) + # The Code Interpreter tool call definition. + # + # @param input [String] + # @param outputs [Array] # Text output from the Code Interpreter tool call as part of a run step. module Output @@ -90,15 +82,11 @@ class Logs < OpenAI::Internal::Type::BaseModel # @return [Symbol, :logs] required :type, const: :logs - # @!parse - # # Text output from the Code Interpreter tool call as part of a run step. - # # - # # @param logs [String] - # # @param type [Symbol, :logs] - # # - # def initialize(logs:, type: :logs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(logs:, type: :logs) + # Text output from the Code Interpreter tool call as part of a run step. + # + # @param logs [String] + # @param type [Symbol, :logs] end class Image < OpenAI::Internal::Type::BaseModel @@ -114,13 +102,9 @@ class Image < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image] required :type, const: :image - # @!parse - # # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] - # # @param type [Symbol, :image] - # # - # def initialize(image:, type: :image, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image:, type: :image) + # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image::Image] + # @param type [Symbol, :image] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image#image class Image < OpenAI::Internal::Type::BaseModel @@ -131,18 +115,13 @@ class Image < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!parse - # # @param file_id [String] - # # - # def initialize(file_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:) + # @param file_id [String] end end - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Logs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall::CodeInterpreter::Output::Image)] end end end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index a9cbef61..3b041752 100644 --- 
a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -40,17 +40,13 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] # attr_writer :code_interpreter - # @!parse - # # Details of the Code Interpreter tool call the run step was involved in. - # # - # # @param index [Integer] - # # @param id [String] - # # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] - # # @param type [Symbol, :code_interpreter] - # # - # def initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter) + # Details of the Code Interpreter tool call the run step was involved in. + # + # @param index [Integer] + # @param id [String] + # @param code_interpreter [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] + # @param type [Symbol, :code_interpreter] # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel @@ -77,15 +73,11 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :outputs - # @!parse - # # The Code Interpreter tool call definition. - # # - # # @param input [String] - # # @param outputs [Array] - # # - # def initialize(input: nil, outputs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input: nil, outputs: nil) + # The Code Interpreter tool call definition. + # + # @param input [String] + # @param outputs [Array] # Text output from the Code Interpreter tool call as part of a run step. 
module Output @@ -98,9 +90,8 @@ module Output variant :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterLogs, OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage)] end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 3c6c423a..9ab8ead6 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -25,14 +25,10 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!parse - # # @param id [String] - # # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] - # # @param type [Symbol, :file_search] - # # - # def initialize(id:, file_search:, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, file_search:, type: :file_search) + # @param id [String] + # @param file_search [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch] + # @param type [Symbol, :file_search] # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::Internal::Type::BaseModel @@ -58,15 +54,11 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :results - # @!parse - # # For now, this is always going to be an empty object. - # # - # # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] - # # @param results [Array] - # # - # def initialize(ranking_options: nil, results: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranking_options: nil, results: nil) + # For now, this is always going to be an empty object. + # + # @param ranking_options [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] + # @param results [Array] # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -85,15 +77,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @return [Float] required :score_threshold, Float - # @!parse - # # The ranking options for the file search. - # # - # # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] - # # @param score_threshold [Float] - # # - # def initialize(ranker:, score_threshold:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranker:, score_threshold:) + # The ranking options for the file search. + # + # @param ranker [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions::Ranker] + # @param score_threshold [Float] # The ranker to use for the file search. If not specified will use the `auto` # ranker. @@ -105,11 +93,8 @@ module Ranker AUTO = :auto DEFAULT_2024_08_21 = :default_2024_08_21 - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -145,17 +130,13 @@ class Result < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :content - # @!parse - # # A result instance of the file search. - # # - # # @param file_id [String] - # # @param file_name [String] - # # @param score [Float] - # # @param content [Array] - # # - # def initialize(file_id:, file_name:, score:, content: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, file_name:, score:, content: nil) + # A result instance of the file search. + # + # @param file_id [String] + # @param file_name [String] + # @param score [Float] + # @param content [Array] class Content < OpenAI::Internal::Type::BaseModel # @!attribute [r] text @@ -179,13 +160,9 @@ class Content < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] # attr_writer :type - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] - # # - # def initialize(text: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text: nil, type: nil) + # @param text [String] + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] # The type of the content. # @@ -195,11 +172,8 @@ module Type TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index 4fa2dc05..c4e4e5e3 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -35,15 +35,11 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :id - # @!parse - # # @param file_search [Object] - # # @param index [Integer] - # # @param id [String] - # # @param type [Symbol, :file_search] - # # - # def initialize(file_search:, index:, id: nil, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_search:, index:, id: nil, type: :file_search) + # @param file_search [Object] + # @param index [Integer] + # @param id [String] + # @param type [Symbol, :file_search] end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call.rb b/lib/openai/models/beta/threads/runs/function_tool_call.rb index cc1eb09a..eb5ccad9 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call.rb @@ -25,14 +25,10 @@ class FunctionToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param id [String] - # # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] - # # @param type [Symbol, :function] - # # - # def initialize(id:, function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, function:, type: :function) + # @param id [String] + # @param function 
[OpenAI::Models::Beta::Threads::Runs::FunctionToolCall::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -56,16 +52,12 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String, nil] required :output, String, nil?: true - # @!parse - # # The definition of the function that was called. - # # - # # @param arguments [String] - # # @param name [String] - # # @param output [String, nil] - # # - # def initialize(arguments:, name:, output:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:, output:) + # The definition of the function that was called. + # + # @param arguments [String] + # @param name [String] + # @param output [String, nil] end end end diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index 6a164004..fe7116d5 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -39,15 +39,11 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] # attr_writer :function - # @!parse - # # @param index [Integer] - # # @param id [String] - # # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] - # # @param type [Symbol, :function] - # # - # def initialize(index:, id: nil, function: nil, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, id: nil, function: nil, type: :function) + # @param index [Integer] + # @param id [String] + # @param function [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::Internal::Type::BaseModel @@ -79,16 +75,12 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :output, String, nil?: true - # @!parse - # # The definition of the function that was called. - # # - # # @param arguments [String] - # # @param name [String] - # # @param output [String, nil] - # # - # def initialize(arguments: nil, name: nil, output: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments: nil, name: nil, output: nil) + # The definition of the function that was called. + # + # @param arguments [String] + # @param name [String] + # @param output [String, nil] end end end diff --git a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb index 12b7b685..575eb64f 100644 --- a/lib/openai/models/beta/threads/runs/message_creation_step_details.rb +++ b/lib/openai/models/beta/threads/runs/message_creation_step_details.rb @@ -18,15 +18,11 @@ class MessageCreationStepDetails < OpenAI::Internal::Type::BaseModel # @return [Symbol, :message_creation] required :type, const: :message_creation - # @!parse - # # Details of the message creation by the run step. 
- # # - # # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] - # # @param type [Symbol, :message_creation] - # # - # def initialize(message_creation:, type: :message_creation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_creation:, type: :message_creation) + # Details of the message creation by the run step. + # + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails::MessageCreation] + # @param type [Symbol, :message_creation] # @see OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -36,12 +32,8 @@ class MessageCreation < OpenAI::Internal::Type::BaseModel # @return [String] required :message_id, String - # @!parse - # # @param message_id [String] - # # - # def initialize(message_id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_id:) + # @param message_id [String] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step.rb b/lib/openai/models/beta/threads/runs/run_step.rb index a4111bd4..d4fe0c67 100644 --- a/lib/openai/models/beta/threads/runs/run_step.rb +++ b/lib/openai/models/beta/threads/runs/run_step.rb @@ -116,49 +116,25 @@ class RunStep < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] required :usage, -> { OpenAI::Models::Beta::Threads::Runs::RunStep::Usage }, nil?: true - # @!parse - # # Represents a step in execution of a run. - # # - # # @param id [String] - # # @param assistant_id [String] - # # @param cancelled_at [Integer, nil] - # # @param completed_at [Integer, nil] - # # @param created_at [Integer] - # # @param expired_at [Integer, nil] - # # @param failed_at [Integer, nil] - # # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param run_id [String] - # # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] - # # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] - # # @param thread_id [String] - # # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] - # # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] - # # @param object [Symbol, :"thread.run.step"] - # # - # def initialize( - # id:, - # assistant_id:, - # cancelled_at:, - # completed_at:, - # created_at:, - # expired_at:, - # failed_at:, - # last_error:, - # metadata:, - # run_id:, - # status:, - # step_details:, - # thread_id:, - # type:, - # usage:, - # object: :"thread.run.step", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, assistant_id:, cancelled_at:, completed_at:, created_at:, expired_at:, failed_at:, last_error:, metadata:, run_id:, status:, step_details:, thread_id:, type:, usage:, object: :"thread.run.step") + # Represents a step in execution of a run. 
+ # + # @param id [String] + # @param assistant_id [String] + # @param cancelled_at [Integer, nil] + # @param completed_at [Integer, nil] + # @param created_at [Integer] + # @param expired_at [Integer, nil] + # @param failed_at [Integer, nil] + # @param last_error [OpenAI::Models::Beta::Threads::Runs::RunStep::LastError, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param run_id [String] + # @param status [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Status] + # @param step_details [OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails] + # @param thread_id [String] + # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::Type] + # @param usage [OpenAI::Models::Beta::Threads::Runs::RunStep::Usage, nil] + # @param object [Symbol, :"thread.run.step"] # @see OpenAI::Models::Beta::Threads::Runs::RunStep#last_error class LastError < OpenAI::Internal::Type::BaseModel @@ -174,16 +150,12 @@ class LastError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # The last error associated with this run step. Will be `null` if there are no - # # errors. - # # - # # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # The last error associated with this run step. Will be `null` if there are no + # errors. + # + # @param code [Symbol, OpenAI::Models::Beta::Threads::Runs::RunStep::LastError::Code] + # @param message [String] # One of `server_error` or `rate_limit_exceeded`. # @@ -194,11 +166,8 @@ module Code SERVER_ERROR = :server_error RATE_LIMIT_EXCEEDED = :rate_limit_exceeded - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -215,11 +184,8 @@ module Status COMPLETED = :completed EXPIRED = :expired - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The details of the run step. @@ -236,9 +202,8 @@ module StepDetails # Details of the tool call. variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::MessageCreationStepDetails, OpenAI::Models::Beta::Threads::Runs::ToolCallsStepDetails)] end # The type of run step, which can be either `message_creation` or `tool_calls`. @@ -250,11 +215,8 @@ module Type MESSAGE_CREATION = :message_creation TOOL_CALLS = :tool_calls - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Beta::Threads::Runs::RunStep#usage @@ -277,17 +239,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Usage statistics related to the run step. This value will be `null` while the - # # run step's status is `in_progress`. 
- # # - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:) + # Usage statistics related to the run step. This value will be `null` while the + # run step's status is `in_progress`. + # + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 15e16864..10ae040b 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -16,14 +16,10 @@ class RunStepDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] # attr_writer :step_details - # @!parse - # # The delta containing the fields that have changed on the run step. - # # - # # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] - # # - # def initialize(step_details: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(step_details: nil) + # The delta containing the fields that have changed on the run step. + # + # @param step_details [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] # The details of the run step. # @@ -39,9 +35,8 @@ module StepDetails # Details of the tool call. variant :tool_calls, -> { OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject)] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb index a71b9858..4d83e30e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_event.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_event.rb @@ -24,17 +24,13 @@ class RunStepDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.run.step.delta"] required :object, const: :"thread.run.step.delta" - # @!parse - # # Represents a run step delta i.e. any changed fields on a run step during - # # streaming. - # # - # # @param id [String] - # # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] - # # @param object [Symbol, :"thread.run.step.delta"] - # # - # def initialize(id:, delta:, object: :"thread.run.step.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, delta:, object: :"thread.run.step.delta") + # Represents a run step delta i.e. any changed fields on a run step during + # streaming. 
+ # + # @param id [String] + # @param delta [OpenAI::Models::Beta::Threads::Runs::RunStepDelta] + # @param object [Symbol, :"thread.run.step.delta"] end end diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index 178763ff..c57bba4e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -22,15 +22,11 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] # attr_writer :message_creation - # @!parse - # # Details of the message creation by the run step. - # # - # # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] - # # @param type [Symbol, :message_creation] - # # - # def initialize(message_creation: nil, type: :message_creation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_creation: nil, type: :message_creation) + # Details of the message creation by the run step. + # + # @param message_creation [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] + # @param type [Symbol, :message_creation] # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel @@ -44,12 +40,8 @@ class MessageCreation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :message_id - # @!parse - # # @param message_id [String] - # # - # def initialize(message_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(message_id: nil) + # @param message_id [String] end end end diff --git a/lib/openai/models/beta/threads/runs/run_step_include.rb b/lib/openai/models/beta/threads/runs/run_step_include.rb index 4c7fe791..5253cc7e 100644 --- a/lib/openai/models/beta/threads/runs/run_step_include.rb +++ b/lib/openai/models/beta/threads/runs/run_step_include.rb @@ -11,11 +11,8 @@ module RunStepInclude STEP_DETAILS_TOOL_CALLS_FILE_SEARCH_RESULTS_CONTENT = :"step_details.tool_calls[*].file_search.results[*].content" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 1b281161..4f484a34 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -81,18 +81,14 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] # attr_writer :order - # @!parse - # # @param thread_id [String] - # # @param after [String] - # # @param before [String] - # # @param include [Array] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + # @param thread_id [String] + # @param after [String] + # @param before [String] + # @param include [Array] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -102,11 +98,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index a02c8ce0..3e6934b7 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -38,15 +38,11 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :include - # @!parse - # # @param thread_id [String] - # # @param run_id [String] - # # @param include [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(thread_id:, run_id:, include: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(thread_id:, run_id:, include: nil, request_options: {}) + # @param thread_id [String] + # @param run_id [String] + # @param include [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call.rb b/lib/openai/models/beta/threads/runs/tool_call.rb index eaee8252..5e18fa3b 100644 --- a/lib/openai/models/beta/threads/runs/tool_call.rb +++ b/lib/openai/models/beta/threads/runs/tool_call.rb @@ -18,9 +18,8 @@ module ToolCall variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCall } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] - # def self.variants; end + # @!method self.variants + # @return 
[Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCall, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall, OpenAI::Models::Beta::Threads::Runs::FunctionToolCall)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta.rb b/lib/openai/models/beta/threads/runs/tool_call_delta.rb index a99db2d3..e5cd0aff 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta.rb @@ -18,9 +18,8 @@ module ToolCallDelta variant :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta } - # @!parse - # # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCallDelta, OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta)] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index 8c1394b6..ca02ab4c 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -25,15 +25,11 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # Details of the tool call. - # # - # # @param tool_calls [Array] - # # @param type [Symbol, :tool_calls] - # # - # def initialize(tool_calls: nil, type: :tool_calls, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(tool_calls: nil, type: :tool_calls) + # Details of the tool call. + # + # @param tool_calls [Array] + # @param type [Symbol, :tool_calls] end end end diff --git a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb index 1ac644f3..08393ac6 100644 --- a/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb +++ b/lib/openai/models/beta/threads/runs/tool_calls_step_details.rb @@ -21,15 +21,11 @@ class ToolCallsStepDetails < OpenAI::Internal::Type::BaseModel # @return [Symbol, :tool_calls] required :type, const: :tool_calls - # @!parse - # # Details of the tool call. - # # - # # @param tool_calls [Array] - # # @param type [Symbol, :tool_calls] - # # - # def initialize(tool_calls:, type: :tool_calls, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(tool_calls:, type: :tool_calls) + # Details of the tool call. 
+ # + # @param tool_calls [Array] + # @param type [Symbol, :tool_calls] end end end diff --git a/lib/openai/models/beta/threads/text.rb b/lib/openai/models/beta/threads/text.rb index 35eb23bc..aa0282e1 100644 --- a/lib/openai/models/beta/threads/text.rb +++ b/lib/openai/models/beta/threads/text.rb @@ -17,13 +17,9 @@ class Text < OpenAI::Internal::Type::BaseModel # @return [String] required :value, String - # @!parse - # # @param annotations [Array] - # # @param value [String] - # # - # def initialize(annotations:, value:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotations:, value:) + # @param annotations [Array] + # @param value [String] end end end diff --git a/lib/openai/models/beta/threads/text_content_block.rb b/lib/openai/models/beta/threads/text_content_block.rb index 0e90d460..7d8eb24b 100644 --- a/lib/openai/models/beta/threads/text_content_block.rb +++ b/lib/openai/models/beta/threads/text_content_block.rb @@ -16,15 +16,11 @@ class TextContentBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # The text content that is part of a message. - # # - # # @param text [OpenAI::Models::Beta::Threads::Text] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # The text content that is part of a message. + # + # @param text [OpenAI::Models::Beta::Threads::Text] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/beta/threads/text_content_block_param.rb b/lib/openai/models/beta/threads/text_content_block_param.rb index 9cbf24b3..26880392 100644 --- a/lib/openai/models/beta/threads/text_content_block_param.rb +++ b/lib/openai/models/beta/threads/text_content_block_param.rb @@ -17,15 +17,11 @@ class TextContentBlockParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # The text content that is part of a message. - # # - # # @param text [String] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # The text content that is part of a message. 
+ # + # @param text [String] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index 978ecd2c..cbac0c09 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -25,13 +25,9 @@ class TextDelta < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :value - # @!parse - # # @param annotations [Array] - # # @param value [String] - # # - # def initialize(annotations: nil, value: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotations: nil, value: nil) + # @param annotations [Array] + # @param value [String] end end end diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index 7c1b9ecc..12701fae 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -26,16 +26,12 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Beta::Threads::TextDelta] # attr_writer :text - # @!parse - # # The text content that is part of a message. - # # - # # @param index [Integer] - # # @param text [OpenAI::Models::Beta::Threads::TextDelta] - # # @param type [Symbol, :text] - # # - # def initialize(index:, text: nil, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, text: nil, type: :text) + # The text content that is part of a message. + # + # @param index [Integer] + # @param text [OpenAI::Models::Beta::Threads::TextDelta] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 1ea5a81f..8e808b6e 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -83,34 +83,18 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::CompletionUsage] # attr_writer :usage - # @!parse - # # Represents a chat completion response returned by model, based on the provided - # # input. - # # - # # @param id [String] - # # @param choices [Array] - # # @param created [Integer] - # # @param model [String] - # # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] - # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage] - # # @param object [Symbol, :"chat.completion"] - # # - # def initialize( - # id:, - # choices:, - # created:, - # model:, - # service_tier: nil, - # system_fingerprint: nil, - # usage: nil, - # object: :"chat.completion", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") + # Represents a chat completion response returned by model, based on the provided + # input. 
+ # + # @param id [String] + # @param choices [Array] + # @param created [Integer] + # @param model [String] + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] + # @param system_fingerprint [String] + # @param usage [OpenAI::Models::CompletionUsage] + # @param object [Symbol, :"chat.completion"] class Choice < OpenAI::Internal::Type::BaseModel # @!attribute finish_reason @@ -142,15 +126,11 @@ class Choice < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionMessage] required :message, -> { OpenAI::Models::Chat::ChatCompletionMessage } - # @!parse - # # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] - # # @param index [Integer] - # # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] - # # @param message [OpenAI::Models::Chat::ChatCompletionMessage] - # # - # def initialize(finish_reason:, index:, logprobs:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(finish_reason:, index:, logprobs:, message:) + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletion::Choice::FinishReason] + # @param index [Integer] + # @param logprobs [OpenAI::Models::Chat::ChatCompletion::Choice::Logprobs, nil] + # @param message [OpenAI::Models::Chat::ChatCompletionMessage] # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -169,11 +149,8 @@ module FinishReason CONTENT_FILTER = :content_filter FUNCTION_CALL = :function_call - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Chat::ChatCompletion::Choice#logprobs @@ -194,15 +171,11 @@ class Logprobs < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true - # @!parse - # # Log probability information for the choice. - # # - # # @param content [Array, nil] - # # @param refusal [Array, nil] - # # - # def initialize(content:, refusal:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, refusal:) + # Log probability information for the choice. + # + # @param content [Array, nil] + # @param refusal [Array, nil] end end @@ -232,11 +205,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index 992b3818..bf6a5b2e 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -63,31 +63,16 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # Messages sent by the model in response to user messages. 
- # # - # # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] - # # @param content [String, Array, nil] - # # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] - # # @param name [String] - # # @param refusal [String, nil] - # # @param tool_calls [Array] - # # @param role [Symbol, :assistant] - # # - # def initialize( - # audio: nil, - # content: nil, - # function_call: nil, - # name: nil, - # refusal: nil, - # tool_calls: nil, - # role: :assistant, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) + # Messages sent by the model in response to user messages. + # + # @param audio [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Audio, nil] + # @param content [String, Array, nil] + # @param function_call [OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall, nil] + # @param name [String] + # @param refusal [String, nil] + # @param tool_calls [Array] + # @param role [Symbol, :assistant] # @see OpenAI::Models::Chat::ChatCompletionAssistantMessageParam#audio class Audio < OpenAI::Internal::Type::BaseModel @@ -97,15 +82,11 @@ class Audio < OpenAI::Internal::Type::BaseModel # @return [String] required :id, String - # @!parse - # # Data about a previous audio response from the model. - # # [Learn more](https://platform.openai.com/docs/guides/audio). - # # - # # @param id [String] - # # - # def initialize(id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:) + # Data about a previous audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). + # + # @param id [String] end # The contents of the assistant message. Required unless `tool_calls` or @@ -133,14 +114,12 @@ module ArrayOfContentPart variant :refusal, -> { OpenAI::Models::Chat::ChatCompletionContentPartRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartRefusal)] end - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ArrayOfContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::Content::ArrayOfContentPart }] @@ -165,16 +144,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. 
+ # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_audio.rb b/lib/openai/models/chat/chat_completion_audio.rb index 238b8a6b..e56cae38 100644 --- a/lib/openai/models/chat/chat_completion_audio.rb +++ b/lib/openai/models/chat/chat_completion_audio.rb @@ -30,19 +30,15 @@ class ChatCompletionAudio < OpenAI::Internal::Type::BaseModel # @return [String] required :transcript, String - # @!parse - # # If the audio output modality is requested, this object contains data about the - # # audio response from the model. - # # [Learn more](https://platform.openai.com/docs/guides/audio). - # # - # # @param id [String] - # # @param data [String] - # # @param expires_at [Integer] - # # @param transcript [String] - # # - # def initialize(id:, data:, expires_at:, transcript:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, data:, expires_at:, transcript:) + # If the audio output modality is requested, this object contains data about the + # audio response from the model. + # [Learn more](https://platform.openai.com/docs/guides/audio). + # + # @param id [String] + # @param data [String] + # @param expires_at [Integer] + # @param transcript [String] end end diff --git a/lib/openai/models/chat/chat_completion_audio_param.rb b/lib/openai/models/chat/chat_completion_audio_param.rb index 1e69e4cf..3374567d 100644 --- a/lib/openai/models/chat/chat_completion_audio_param.rb +++ b/lib/openai/models/chat/chat_completion_audio_param.rb @@ -18,17 +18,13 @@ class ChatCompletionAudioParam < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] required :voice, union: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice } - # @!parse - # # Parameters for audio output. Required when audio output is requested with - # # `modalities: ["audio"]`. - # # [Learn more](https://platform.openai.com/docs/guides/audio). - # # - # # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] - # # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] - # # - # def initialize(format_:, voice:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(format_:, voice:) + # Parameters for audio output. Required when audio output is requested with + # `modalities: ["audio"]`. + # [Learn more](https://platform.openai.com/docs/guides/audio). + # + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Format] + # @param voice [String, Symbol, OpenAI::Models::Chat::ChatCompletionAudioParam::Voice] # Specifies the output audio format. Must be one of `wav`, `mp3`, `flac`, `opus`, # or `pcm16`. @@ -44,11 +40,8 @@ module Format OPUS = :opus PCM16 = :pcm16 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The voice the model uses to respond. 
Supported voices are `alloy`, `ash`, @@ -82,9 +75,8 @@ module Voice variant const: -> { OpenAI::Models::Chat::ChatCompletionAudioParam::Voice::VERSE } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 0a201e76..61a57392 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -84,35 +84,19 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::Models::CompletionUsage }, nil?: true - # @!parse - # # Represents a streamed chunk of a chat completion response returned by the model, - # # based on the provided input. - # # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). - # # - # # @param id [String] - # # @param choices [Array] - # # @param created [Integer] - # # @param model [String] - # # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] - # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage, nil] - # # @param object [Symbol, :"chat.completion.chunk"] - # # - # def initialize( - # id:, - # choices:, - # created:, - # model:, - # service_tier: nil, - # system_fingerprint: nil, - # usage: nil, - # object: :"chat.completion.chunk", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion.chunk") + # Represents a streamed chunk of a chat completion response returned by the model, + # based on the provided input. + # [Learn more](https://platform.openai.com/docs/guides/streaming-responses). 
+ # + # @param id [String] + # @param choices [Array] + # @param created [Integer] + # @param model [String] + # @param service_tier [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] + # @param system_fingerprint [String] + # @param usage [OpenAI::Models::CompletionUsage, nil] + # @param object [Symbol, :"chat.completion.chunk"] class Choice < OpenAI::Internal::Type::BaseModel # @!attribute delta @@ -146,15 +130,11 @@ class Choice < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] optional :logprobs, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs }, nil?: true - # @!parse - # # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] - # # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] - # # @param index [Integer] - # # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] - # # - # def initialize(delta:, finish_reason:, index:, logprobs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, finish_reason:, index:, logprobs: nil) + # @param delta [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta] + # @param finish_reason [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::FinishReason, nil] + # @param index [Integer] + # @param logprobs [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Logprobs, nil] # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#delta class Delta < OpenAI::Internal::Type::BaseModel @@ -201,18 +181,14 @@ class Delta < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # A chat completion delta generated by streamed model responses. - # # - # # @param content [String, nil] - # # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] - # # @param refusal [String, nil] - # # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] - # # @param tool_calls [Array] - # # - # def initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) + # A chat completion delta generated by streamed model responses. + # + # @param content [String, nil] + # @param function_call [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] + # @param refusal [String, nil] + # @param role [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] + # @param tool_calls [Array] # @deprecated # @@ -241,16 +217,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments: nil, name: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments: nil, name: nil) + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. + # + # @param arguments [String] + # @param name [String] end # The role of the author of this message. 
@@ -265,11 +237,8 @@ module Role ASSISTANT = :assistant TOOL = :tool - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class ToolCall < OpenAI::Internal::Type::BaseModel @@ -307,15 +276,11 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] # attr_writer :type - # @!parse - # # @param index [Integer] - # # @param id [String] - # # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] - # # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] - # # - # def initialize(index:, id: nil, function: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(index:, id: nil, function: nil, type: nil) + # @param index [Integer] + # @param id [String] + # @param function [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] + # @param type [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -342,13 +307,9 @@ class Function < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments: nil, name: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments: nil, name: nil) + # @param arguments [String] + # @param name [String] end # The type of the tool. Currently, only `function` is supported. @@ -359,11 +320,8 @@ module Type FUNCTION = :function - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end @@ -385,11 +343,8 @@ module FinishReason CONTENT_FILTER = :content_filter FUNCTION_CALL = :function_call - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice#logprobs @@ -410,15 +365,11 @@ class Logprobs < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob] }, nil?: true - # @!parse - # # Log probability information for the choice. - # # - # # @param content [Array, nil] - # # @param refusal [Array, nil] - # # - # def initialize(content:, refusal:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, refusal:) + # Log probability information for the choice. + # + # @param content [Array, nil] + # @param refusal [Array, nil] end end @@ -448,11 +399,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index 31635ff4..af75a6c6 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -34,16 +34,12 @@ class File < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file] required :type, const: :file - # @!parse - # # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text - # # generation. - # # - # # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] - # # @param type [Symbol, :file] - # # - # def initialize(file:, type: :file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file:, type: :file) + # Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text + # generation. + # + # @param file [OpenAI::Models::Chat::ChatCompletionContentPart::File::File] + # @param type [Symbol, :file] # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel @@ -78,20 +74,15 @@ class File < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :filename - # @!parse - # # @param file_data [String] - # # @param file_id [String] - # # @param filename [String] - # # - # def initialize(file_data: nil, file_id: nil, filename: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_data: nil, file_id: nil, filename: nil) + # @param file_data [String] + # @param file_id [String] + # @param filename [String] end end - # @!parse - # # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionContentPartText, OpenAI::Models::Chat::ChatCompletionContentPartImage, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio, OpenAI::Models::Chat::ChatCompletionContentPart::File)] end end diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index a22c144e..023fa1d0 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -15,15 +15,11 @@ class ChatCompletionContentPartImage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!parse - # # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). - # # - # # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] - # # @param type [Symbol, :image_url] - # # - # def initialize(image_url:, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_url:, type: :image_url) + # Learn about [image inputs](https://platform.openai.com/docs/guides/vision). 
+ # + # @param image_url [OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL] + # @param type [Symbol, :image_url] # @see OpenAI::Models::Chat::ChatCompletionContentPartImage#image_url class ImageURL < OpenAI::Internal::Type::BaseModel @@ -44,13 +40,9 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] # attr_writer :detail - # @!parse - # # @param url [String] - # # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] - # # - # def initialize(url:, detail: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url:, detail: nil) + # @param url [String] + # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). @@ -63,11 +55,8 @@ module Detail LOW = :low HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb index 46b3b077..9331e125 100644 --- a/lib/openai/models/chat/chat_completion_content_part_input_audio.rb +++ b/lib/openai/models/chat/chat_completion_content_part_input_audio.rb @@ -15,15 +15,11 @@ class ChatCompletionContentPartInputAudio < OpenAI::Internal::Type::BaseModel # @return [Symbol, :input_audio] required :type, const: :input_audio - # @!parse - # # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). - # # - # # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] - # # @param type [Symbol, :input_audio] - # # - # def initialize(input_audio:, type: :input_audio, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input_audio:, type: :input_audio) + # Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). + # + # @param input_audio [OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio] + # @param type [Symbol, :input_audio] # @see OpenAI::Models::Chat::ChatCompletionContentPartInputAudio#input_audio class InputAudio < OpenAI::Internal::Type::BaseModel @@ -41,13 +37,9 @@ class InputAudio < OpenAI::Internal::Type::BaseModel enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format }, api_name: :format - # @!parse - # # @param data [String] - # # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] - # # - # def initialize(data:, format_:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, format_:) + # @param data [String] + # @param format_ [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartInputAudio::InputAudio::Format] # The format of the encoded audio data. Currently supports "wav" and "mp3". # @@ -58,11 +50,8 @@ module Format WAV = :wav MP3 = :mp3 - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/chat_completion_content_part_refusal.rb b/lib/openai/models/chat/chat_completion_content_part_refusal.rb index 20e3bc5d..9137d008 100644 --- a/lib/openai/models/chat/chat_completion_content_part_refusal.rb +++ b/lib/openai/models/chat/chat_completion_content_part_refusal.rb @@ -16,13 +16,9 @@ class ChatCompletionContentPartRefusal < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!parse - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(refusal:, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(refusal:, type: :refusal) + # @param refusal [String] + # @param type [Symbol, :refusal] end end diff --git a/lib/openai/models/chat/chat_completion_content_part_text.rb b/lib/openai/models/chat/chat_completion_content_part_text.rb index e1975859..212467d9 100644 --- a/lib/openai/models/chat/chat_completion_content_part_text.rb +++ b/lib/openai/models/chat/chat_completion_content_part_text.rb @@ -16,16 +16,12 @@ class ChatCompletionContentPartText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # Learn about - # # [text inputs](https://platform.openai.com/docs/guides/text-generation). - # # - # # @param text [String] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # Learn about + # [text inputs](https://platform.openai.com/docs/guides/text-generation). + # + # @param text [String] + # @param type [Symbol, :text] end end diff --git a/lib/openai/models/chat/chat_completion_deleted.rb b/lib/openai/models/chat/chat_completion_deleted.rb index 33c9c802..a3125c86 100644 --- a/lib/openai/models/chat/chat_completion_deleted.rb +++ b/lib/openai/models/chat/chat_completion_deleted.rb @@ -23,14 +23,10 @@ class ChatCompletionDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"chat.completion.deleted"] required :object, const: :"chat.completion.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"chat.completion.deleted"] - # # - # def initialize(id:, deleted:, object: :"chat.completion.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"chat.completion.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"chat.completion.deleted"] end end diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index ecd1e5bd..1eb265ea 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -27,18 +27,14 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Developer-provided instructions that the model should follow, regardless of - # # messages sent by the user. With o1 models and newer, `developer` messages - # # replace the previous `system` messages. 
- # # - # # @param content [String, Array] - # # @param name [String] - # # @param role [Symbol, :developer] - # # - # def initialize(content:, name: nil, role: :developer, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name: nil, role: :developer) + # Developer-provided instructions that the model should follow, regardless of + # messages sent by the user. With o1 models and newer, `developer` messages + # replace the previous `system` messages. + # + # @param content [String, Array] + # @param name [String] + # @param role [Symbol, :developer] # The contents of the developer message. # @@ -52,9 +48,8 @@ module Content # An array of content parts with a defined type. For developer messages, only type `text` is supported. variant -> { OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_function_call_option.rb b/lib/openai/models/chat/chat_completion_function_call_option.rb index 3ae8526b..9c7d28fd 100644 --- a/lib/openai/models/chat/chat_completion_function_call_option.rb +++ b/lib/openai/models/chat/chat_completion_function_call_option.rb @@ -10,15 +10,11 @@ class ChatCompletionFunctionCallOption < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # Specifying a particular function via `{"name": "my_function"}` forces the model - # # to call that function. - # # - # # @param name [String] - # # - # def initialize(name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:) + # Specifying a particular function via `{"name": "my_function"}` forces the model + # to call that function. + # + # @param name [String] end end diff --git a/lib/openai/models/chat/chat_completion_function_message_param.rb b/lib/openai/models/chat/chat_completion_function_message_param.rb index 7e00e030..4a8efb76 100644 --- a/lib/openai/models/chat/chat_completion_function_message_param.rb +++ b/lib/openai/models/chat/chat_completion_function_message_param.rb @@ -23,14 +23,10 @@ class ChatCompletionFunctionMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :role, const: :function - # @!parse - # # @param content [String, nil] - # # @param name [String] - # # @param role [Symbol, :function] - # # - # def initialize(content:, name:, role: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name:, role: :function) + # @param content [String, nil] + # @param name [String] + # @param role [Symbol, :function] end end diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index b36682ef..63485db1 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -64,31 +64,16 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tool_calls - # @!parse - # # A chat completion message generated by the model. 
- # # - # # @param content [String, nil] - # # @param refusal [String, nil] - # # @param annotations [Array] - # # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] - # # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] - # # @param tool_calls [Array] - # # @param role [Symbol, :assistant] - # # - # def initialize( - # content:, - # refusal:, - # annotations: nil, - # audio: nil, - # function_call: nil, - # tool_calls: nil, - # role: :assistant, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) + # A chat completion message generated by the model. + # + # @param content [String, nil] + # @param refusal [String, nil] + # @param annotations [Array] + # @param audio [OpenAI::Models::Chat::ChatCompletionAudio, nil] + # @param function_call [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] + # @param tool_calls [Array] + # @param role [Symbol, :assistant] class Annotation < OpenAI::Internal::Type::BaseModel # @!attribute type @@ -103,15 +88,11 @@ class Annotation < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] required :url_citation, -> { OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation } - # @!parse - # # A URL citation when using web search. - # # - # # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] - # # @param type [Symbol, :url_citation] - # # - # def initialize(url_citation:, type: :url_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url_citation:, type: :url_citation) + # A URL citation when using web search. + # + # @param url_citation [OpenAI::Models::Chat::ChatCompletionMessage::Annotation::URLCitation] + # @param type [Symbol, :url_citation] # @see OpenAI::Models::Chat::ChatCompletionMessage::Annotation#url_citation class URLCitation < OpenAI::Internal::Type::BaseModel @@ -139,17 +120,13 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # A URL citation when using web search. - # # - # # @param end_index [Integer] - # # @param start_index [Integer] - # # @param title [String] - # # @param url [String] - # # - # def initialize(end_index:, start_index:, title:, url:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, start_index:, title:, url:) + # A URL citation when using web search. + # + # @param end_index [Integer] + # @param start_index [Integer] + # @param title [String] + # @param url [String] end end @@ -172,16 +149,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # Deprecated and replaced by `tool_calls`. The name and arguments of a function - # # that should be called, as generated by the model. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # Deprecated and replaced by `tool_calls`. The name and arguments of a function + # that should be called, as generated by the model. 
+ # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_message_param.rb b/lib/openai/models/chat/chat_completion_message_param.rb index ed72d515..16e5d625 100644 --- a/lib/openai/models/chat/chat_completion_message_param.rb +++ b/lib/openai/models/chat/chat_completion_message_param.rb @@ -32,9 +32,8 @@ module ChatCompletionMessageParam variant :function, -> { OpenAI::Models::Chat::ChatCompletionFunctionMessageParam } - # @!parse - # # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Chat::ChatCompletionDeveloperMessageParam, OpenAI::Models::Chat::ChatCompletionSystemMessageParam, OpenAI::Models::Chat::ChatCompletionUserMessageParam, OpenAI::Models::Chat::ChatCompletionAssistantMessageParam, OpenAI::Models::Chat::ChatCompletionToolMessageParam, OpenAI::Models::Chat::ChatCompletionFunctionMessageParam)] end end diff --git a/lib/openai/models/chat/chat_completion_message_tool_call.rb b/lib/openai/models/chat/chat_completion_message_tool_call.rb index cca6cc4e..adad4c9a 100644 --- a/lib/openai/models/chat/chat_completion_message_tool_call.rb +++ b/lib/openai/models/chat/chat_completion_message_tool_call.rb @@ -22,14 +22,10 @@ class ChatCompletionMessageToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param id [String] - # # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] - # # @param type [Symbol, :function] - # # - # def initialize(id:, function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, function:, type: :function) + # @param id [String] + # @param function [OpenAI::Models::Chat::ChatCompletionMessageToolCall::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Chat::ChatCompletionMessageToolCall#function class Function < OpenAI::Internal::Type::BaseModel @@ -48,15 +44,11 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # The function that the model called. - # # - # # @param arguments [String] - # # @param name [String] - # # - # def initialize(arguments:, name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, name:) + # The function that the model called. + # + # @param arguments [String] + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_modality.rb b/lib/openai/models/chat/chat_completion_modality.rb index 05e0e087..10d3ba8c 100644 --- a/lib/openai/models/chat/chat_completion_modality.rb +++ b/lib/openai/models/chat/chat_completion_modality.rb @@ -9,11 +9,8 @@ module ChatCompletionModality TEXT = :text AUDIO = :audio - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end diff --git a/lib/openai/models/chat/chat_completion_named_tool_choice.rb b/lib/openai/models/chat/chat_completion_named_tool_choice.rb index c887906f..d6d7a955 100644 --- a/lib/openai/models/chat/chat_completion_named_tool_choice.rb +++ b/lib/openai/models/chat/chat_completion_named_tool_choice.rb @@ -15,16 +15,12 @@ class ChatCompletionNamedToolChoice < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # Specifies a tool the model should use. Use to force the model to call a specific - # # function. - # # - # # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] - # # @param type [Symbol, :function] - # # - # def initialize(function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(function:, type: :function) + # Specifies a tool the model should use. Use to force the model to call a specific + # function. + # + # @param function [OpenAI::Models::Chat::ChatCompletionNamedToolChoice::Function] + # @param type [Symbol, :function] # @see OpenAI::Models::Chat::ChatCompletionNamedToolChoice#function class Function < OpenAI::Internal::Type::BaseModel @@ -34,12 +30,8 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!parse - # # @param name [String] - # # - # def initialize(name:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:) + # @param name [String] end end end diff --git a/lib/openai/models/chat/chat_completion_prediction_content.rb b/lib/openai/models/chat/chat_completion_prediction_content.rb index 0cc7df62..b6c130fb 100644 --- a/lib/openai/models/chat/chat_completion_prediction_content.rb +++ b/lib/openai/models/chat/chat_completion_prediction_content.rb @@ -19,16 +19,12 @@ class ChatCompletionPredictionContent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :content] required :type, const: :content - # @!parse - # # Static predicted output content, such as the content of a text file that is - # # being regenerated. - # # - # # @param content [String, Array] - # # @param type [Symbol, :content] - # # - # def initialize(content:, type: :content, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type: :content) + # Static predicted output content, such as the content of a text file that is + # being regenerated. + # + # @param content [String, Array] + # @param type [Symbol, :content] # The content that should be matched when generating a model response. If # generated tokens would match this content, the entire model response can be @@ -45,9 +41,8 @@ module Content # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text inputs. 
variant -> { OpenAI::Models::Chat::ChatCompletionPredictionContent::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_role.rb b/lib/openai/models/chat/chat_completion_role.rb index 87bd08a8..514fe22c 100644 --- a/lib/openai/models/chat/chat_completion_role.rb +++ b/lib/openai/models/chat/chat_completion_role.rb @@ -14,11 +14,8 @@ module ChatCompletionRole TOOL = :tool FUNCTION = :function - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end diff --git a/lib/openai/models/chat/chat_completion_store_message.rb b/lib/openai/models/chat/chat_completion_store_message.rb index 3ae2370c..119b5a36 100644 --- a/lib/openai/models/chat/chat_completion_store_message.rb +++ b/lib/openai/models/chat/chat_completion_store_message.rb @@ -10,14 +10,10 @@ class ChatCompletionStoreMessage < OpenAI::Models::Chat::ChatCompletionMessage # @return [String] required :id, String - # @!parse - # # A chat completion message generated by the model. - # # - # # @param id [String] - # # - # def initialize(id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:) + # A chat completion message generated by the model. + # + # @param id [String] end end diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index cb29b9a6..04006615 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -20,14 +20,10 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :include_usage - # @!parse - # # Options for streaming response. Only set this when you set `stream: true`. - # # - # # @param include_usage [Boolean] - # # - # def initialize(include_usage: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(include_usage: nil) + # Options for streaming response. Only set this when you set `stream: true`. + # + # @param include_usage [Boolean] end end diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index 44d1d207..bb2c91b6 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -27,18 +27,14 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Developer-provided instructions that the model should follow, regardless of - # # messages sent by the user. With o1 models and newer, use `developer` messages - # # for this purpose instead. - # # - # # @param content [String, Array] - # # @param name [String] - # # @param role [Symbol, :system] - # # - # def initialize(content:, name: nil, role: :system, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name: nil, role: :system) + # Developer-provided instructions that the model should follow, regardless of + # messages sent by the user. 
With o1 models and newer, use `developer` messages + # for this purpose instead. + # + # @param content [String, Array] + # @param name [String] + # @param role [Symbol, :system] # The contents of the system message. # @@ -52,9 +48,8 @@ module Content # An array of content parts with a defined type. For system messages, only type `text` is supported. variant -> { OpenAI::Models::Chat::ChatCompletionSystemMessageParam::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_token_logprob.rb b/lib/openai/models/chat/chat_completion_token_logprob.rb index a9f0bc0d..1ab8dd0f 100644 --- a/lib/openai/models/chat/chat_completion_token_logprob.rb +++ b/lib/openai/models/chat/chat_completion_token_logprob.rb @@ -36,15 +36,11 @@ class ChatCompletionTokenLogprob < OpenAI::Internal::Type::BaseModel required :top_logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTokenLogprob::TopLogprob] } - # @!parse - # # @param token [String] - # # @param bytes [Array, nil] - # # @param logprob [Float] - # # @param top_logprobs [Array] - # # - # def initialize(token:, bytes:, logprob:, top_logprobs:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token:, bytes:, logprob:, top_logprobs:) + # @param token [String] + # @param bytes [Array, nil] + # @param logprob [Float] + # @param top_logprobs [Array] class TopLogprob < OpenAI::Internal::Type::BaseModel # @!attribute token @@ -70,14 +66,10 @@ class TopLogprob < OpenAI::Internal::Type::BaseModel # @return [Float] required :logprob, Float - # @!parse - # # @param token [String] - # # @param bytes [Array, nil] - # # @param logprob [Float] - # # - # def initialize(token:, bytes:, logprob:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(token:, bytes:, logprob:) + # @param token [String] + # @param bytes [Array, nil] + # @param logprob [Float] end end end diff --git a/lib/openai/models/chat/chat_completion_tool.rb b/lib/openai/models/chat/chat_completion_tool.rb index 8737a603..1cef5fd5 100644 --- a/lib/openai/models/chat/chat_completion_tool.rb +++ b/lib/openai/models/chat/chat_completion_tool.rb @@ -15,13 +15,9 @@ class ChatCompletionTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # @param function [OpenAI::Models::FunctionDefinition] - # # @param type [Symbol, :function] - # # - # def initialize(function:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(function:, type: :function) + # @param function [OpenAI::Models::FunctionDefinition] + # @param type [Symbol, :function] end end diff --git a/lib/openai/models/chat/chat_completion_tool_choice_option.rb b/lib/openai/models/chat/chat_completion_tool_choice_option.rb index c57aaf23..5fdd5796 100644 --- a/lib/openai/models/chat/chat_completion_tool_choice_option.rb +++ b/lib/openai/models/chat/chat_completion_tool_choice_option.rb @@ -31,16 +31,12 @@ module Auto AUTO = :auto REQUIRED = :required - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice)] end end diff --git a/lib/openai/models/chat/chat_completion_tool_message_param.rb b/lib/openai/models/chat/chat_completion_tool_message_param.rb index 20c3a014..a0718af1 100644 --- a/lib/openai/models/chat/chat_completion_tool_message_param.rb +++ b/lib/openai/models/chat/chat_completion_tool_message_param.rb @@ -22,14 +22,10 @@ class ChatCompletionToolMessageParam < OpenAI::Internal::Type::BaseModel # @return [String] required :tool_call_id, String - # @!parse - # # @param content [String, Array] - # # @param tool_call_id [String] - # # @param role [Symbol, :tool] - # # - # def initialize(content:, tool_call_id:, role: :tool, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, tool_call_id:, role: :tool) + # @param content [String, Array] + # @param tool_call_id [String] + # @param role [Symbol, :tool] # The contents of the tool message. # @@ -43,9 +39,8 @@ module Content # An array of content parts with a defined type. For tool messages, only type `text` is supported. variant -> { OpenAI::Models::Chat::ChatCompletionToolMessageParam::Content::ChatCompletionContentPartTextArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartTextArray = OpenAI::Internal::Type::ArrayOf[-> { OpenAI::Models::Chat::ChatCompletionContentPartText }] diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 6cf8585a..34b81339 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -27,17 +27,13 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # Messages sent by an end user, containing prompts or additional context - # # information. - # # - # # @param content [String, Array] - # # @param name [String] - # # @param role [Symbol, :user] - # # - # def initialize(content:, name: nil, role: :user, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, name: nil, role: :user) + # Messages sent by an end user, containing prompts or additional context + # information. + # + # @param content [String, Array] + # @param name [String] + # @param role [Symbol, :user] # The contents of the user message. # @@ -51,9 +47,8 @@ module Content # An array of content parts with a defined type. Supported options differ based on the [model](https://platform.openai.com/docs/models) being used to generate the response. Can contain text, image, or audio inputs. 
variant -> { OpenAI::Models::Chat::ChatCompletionUserMessageParam::Content::ChatCompletionContentPartArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] ChatCompletionContentPartArray = OpenAI::Internal::Type::ArrayOf[union: -> { OpenAI::Models::Chat::ChatCompletionContentPart }] diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index bee73ddb..fff48ea0 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -355,77 +355,38 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] # attr_writer :web_search_options - # @!parse - # # @param messages [Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel] - # # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] - # # @param frequency_penalty [Float, nil] - # # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - # # @param functions [Array] - # # @param logit_bias [Hash{Symbol=>Integer}, nil] - # # @param logprobs [Boolean, nil] - # # @param max_completion_tokens [Integer, nil] - # # @param max_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param modalities [Array, nil] - # # @param n [Integer, nil] - # # @param parallel_tool_calls [Boolean] - # # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] - # # @param presence_penalty [Float, nil] - # # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - # # @param seed [Integer, nil] - # # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] - # # @param stop [String, Array, nil] - # # @param store [Boolean, nil] - # # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # # @param temperature [Float, nil] - # # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - # # @param tools [Array] - # # @param top_logprobs [Integer, nil] - # # @param top_p [Float, nil] - # # @param user [String] - # # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # messages:, - # model:, - # audio: nil, - # frequency_penalty: nil, - # function_call: nil, - # functions: nil, - # logit_bias: nil, - # logprobs: nil, - # max_completion_tokens: nil, - # max_tokens: nil, - # metadata: nil, - # modalities: nil, - # n: nil, - # parallel_tool_calls: nil, - # prediction: nil, - # presence_penalty: nil, - # reasoning_effort: nil, - # response_format: nil, - # seed: nil, - # service_tier: nil, - # stop: nil, - # store: nil, - # stream_options: nil, - # temperature: nil, - # tool_choice: nil, - # tools: nil, - # top_logprobs: nil, - # top_p: nil, - # user: nil, - # web_search_options: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(messages:, model:, 
audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) + # @param messages [Array] + # @param model [String, Symbol, OpenAI::Models::ChatModel] + # @param audio [OpenAI::Models::Chat::ChatCompletionAudioParam, nil] + # @param frequency_penalty [Float, nil] + # @param function_call [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] + # @param functions [Array] + # @param logit_bias [Hash{Symbol=>Integer}, nil] + # @param logprobs [Boolean, nil] + # @param max_completion_tokens [Integer, nil] + # @param max_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param modalities [Array, nil] + # @param n [Integer, nil] + # @param parallel_tool_calls [Boolean] + # @param prediction [OpenAI::Models::Chat::ChatCompletionPredictionContent, nil] + # @param presence_penalty [Float, nil] + # @param reasoning_effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param response_format [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] + # @param seed [Integer, nil] + # @param service_tier [Symbol, OpenAI::Models::Chat::CompletionCreateParams::ServiceTier, nil] + # @param stop [String, Array, nil] + # @param store [Boolean, nil] + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] + # @param tools [Array] + # @param top_logprobs [Integer, nil] + # @param top_p [Float, nil] + # @param user [String] + # @param web_search_options [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a # wide range of models with different capabilities, performance characteristics, @@ -443,9 +404,8 @@ module Model # to browse and compare available models. variant enum: -> { OpenAI::Models::ChatModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel)] end # @deprecated @@ -482,16 +442,12 @@ module FunctionCallMode NONE = :none AUTO = :auto - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption)] end # @deprecated @@ -530,14 +486,10 @@ class Function < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>Object}] # attr_writer :parameters - # @!parse - # # @param name [String] - # # @param description [String] - # # @param parameters [Hash{Symbol=>Object}] - # # - # def initialize(name:, description: nil, parameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, description: nil, parameters: nil) + # @param name [String] + # @param description [String] + # @param parameters [Hash{Symbol=>Object}] end module Modality @@ -546,11 +498,8 @@ module Modality TEXT = :text AUDIO = :audio - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # An object specifying the format that the model must output. @@ -579,9 +528,8 @@ module ResponseFormat # to do so. variant -> { OpenAI::Models::ResponseFormatJSONObject } - # @!parse - # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -608,11 +556,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Not supported with latest reasoning models `o3` and `o4-mini`. @@ -626,9 +571,8 @@ module Stop variant -> { OpenAI::Models::Chat::CompletionCreateParams::Stop::StringArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -654,17 +598,13 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation }, nil?: true - # @!parse - # # This tool searches the web for relevant results to use in a response. Learn more - # # about the - # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). - # # - # # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] - # # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] - # # - # def initialize(search_context_size: nil, user_location: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(search_context_size: nil, user_location: nil) + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). 
+ # + # @param search_context_size [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] + # @param user_location [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation, nil] # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. @@ -677,11 +617,8 @@ module SearchContextSize MEDIUM = :medium HIGH = :high - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions#user_location @@ -699,15 +636,11 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :approximate] required :type, const: :approximate - # @!parse - # # Approximate location parameters for the search. - # # - # # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] - # # @param type [Symbol, :approximate] - # # - # def initialize(approximate:, type: :approximate, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(approximate:, type: :approximate) + # Approximate location parameters for the search. + # + # @param approximate [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation::Approximate] + # @param type [Symbol, :approximate] # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel @@ -753,17 +686,13 @@ class Approximate < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :timezone - # @!parse - # # Approximate location parameters for the search. - # # - # # @param city [String] - # # @param country [String] - # # @param region [String] - # # @param timezone [String] - # # - # def initialize(city: nil, country: nil, region: nil, timezone: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) + # Approximate location parameters for the search. 
+ # + # @param city [String] + # @param country [String] + # @param region [String] + # @param timezone [String] end end end diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index a7441968..c32fe53d 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -9,12 +9,8 @@ class CompletionDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index dede958c..911b8a36 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -58,17 +58,13 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param order [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. @@ -78,11 +74,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index 29df8685..ccdba91c 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -9,12 +9,8 @@ class CompletionRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 0a8504f8..954a4400 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -20,13 +20,9 @@ class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] required :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(metadata:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata:, request_options: {}) + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index eca322af..02329b59 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -41,15 +41,11 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. @@ -59,11 +55,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/chat_model.rb b/lib/openai/models/chat_model.rb index 1bcb4858..e2cf7b8d 100644 --- a/lib/openai/models/chat_model.rb +++ b/lib/openai/models/chat_model.rb @@ -59,11 +59,8 @@ module ChatModel GPT_3_5_TURBO_0125 = :"gpt-3.5-turbo-0125" GPT_3_5_TURBO_16K_0613 = :"gpt-3.5-turbo-16k-0613" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/comparison_filter.rb b/lib/openai/models/comparison_filter.rb index 13134827..81515805 100644 --- a/lib/openai/models/comparison_filter.rb +++ b/lib/openai/models/comparison_filter.rb @@ -29,17 +29,13 @@ class ComparisonFilter < OpenAI::Internal::Type::BaseModel # @return [String, Float, Boolean] required :value, union: -> { OpenAI::Models::ComparisonFilter::Value } - # @!parse - # # A filter used to compare a specified attribute key to a given value using a - # # defined comparison operation. - # # - # # @param key [String] - # # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] - # # @param value [String, Float, Boolean] - # # - # def initialize(key:, type:, value:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(key:, type:, value:) + # A filter used to compare a specified attribute key to a given value using a + # defined comparison operation. + # + # @param key [String] + # @param type [Symbol, OpenAI::Models::ComparisonFilter::Type] + # @param value [String, Float, Boolean] # Specifies the comparison operator: `eq`, `ne`, `gt`, `gte`, `lt`, `lte`. # @@ -61,11 +57,8 @@ module Type LT = :lt LTE = :lte - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The value to compare against the attribute key; supports string, number, or @@ -81,9 +74,8 @@ module Value variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index 6062d60a..e31c3bbf 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -59,21 +59,17 @@ class Completion < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::CompletionUsage] # attr_writer :usage - # @!parse - # # Represents a completion response from the API. Note: both the streamed and - # # non-streamed response objects share the same shape (unlike the chat endpoint). - # # - # # @param id [String] - # # @param choices [Array] - # # @param created [Integer] - # # @param model [String] - # # @param system_fingerprint [String] - # # @param usage [OpenAI::Models::CompletionUsage] - # # @param object [Symbol, :text_completion] - # # - # def initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) + # Represents a completion response from the API. Note: both the streamed and + # non-streamed response objects share the same shape (unlike the chat endpoint). 
+ # + # @param id [String] + # @param choices [Array] + # @param created [Integer] + # @param model [String] + # @param system_fingerprint [String] + # @param usage [OpenAI::Models::CompletionUsage] + # @param object [Symbol, :text_completion] end end end diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 07081468..096074a0 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -27,15 +27,11 @@ class CompletionChoice < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!parse - # # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] - # # @param index [Integer] - # # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] - # # @param text [String] - # # - # def initialize(finish_reason:, index:, logprobs:, text:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(finish_reason:, index:, logprobs:, text:) + # @param finish_reason [Symbol, OpenAI::Models::CompletionChoice::FinishReason] + # @param index [Integer] + # @param logprobs [OpenAI::Models::CompletionChoice::Logprobs, nil] + # @param text [String] # The reason the model stopped generating tokens. This will be `stop` if the model # hit a natural stop point or a provided stop sequence, `length` if the maximum @@ -50,11 +46,8 @@ module FinishReason LENGTH = :length CONTENT_FILTER = :content_filter - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::CompletionChoice#logprobs @@ -95,15 +88,11 @@ class Logprobs < OpenAI::Internal::Type::BaseModel # # @return [ArrayFloat}>] # attr_writer :top_logprobs - # @!parse - # # @param text_offset [Array] - # # @param token_logprobs [Array] - # # @param tokens [Array] - # # @param top_logprobs [ArrayFloat}>] - # # - # def initialize(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil) + # @param text_offset [Array] + # @param token_logprobs [Array] + # @param tokens [Array] + # @param top_logprobs [ArrayFloat}>] end end end diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index 6463fc0e..c1e3656e 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -187,51 +187,25 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] - # # @param prompt [String, Array, Array, Array>, nil] - # # @param best_of [Integer, nil] - # # @param echo [Boolean, nil] - # # @param frequency_penalty [Float, nil] - # # @param logit_bias [Hash{Symbol=>Integer}, nil] - # # @param logprobs [Integer, nil] - # # @param max_tokens [Integer, nil] - # # @param n [Integer, nil] - # # @param presence_penalty [Float, nil] - # # @param seed [Integer, nil] - # # @param stop [String, Array, nil] - # # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] - # # @param suffix [String, nil] - # # @param temperature [Float, nil] - # # @param top_p [Float, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, 
Hash{Symbol=>Object}] - # # - # def initialize( - # model:, - # prompt:, - # best_of: nil, - # echo: nil, - # frequency_penalty: nil, - # logit_bias: nil, - # logprobs: nil, - # max_tokens: nil, - # n: nil, - # presence_penalty: nil, - # seed: nil, - # stop: nil, - # stream_options: nil, - # suffix: nil, - # temperature: nil, - # top_p: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) + # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] + # @param prompt [String, Array, Array, Array>, nil] + # @param best_of [Integer, nil] + # @param echo [Boolean, nil] + # @param frequency_penalty [Float, nil] + # @param logit_bias [Hash{Symbol=>Integer}, nil] + # @param logprobs [Integer, nil] + # @param max_tokens [Integer, nil] + # @param n [Integer, nil] + # @param presence_penalty [Float, nil] + # @param seed [Integer, nil] + # @param stop [String, Array, nil] + # @param stream_options [OpenAI::Models::Chat::ChatCompletionStreamOptions, nil] + # @param suffix [String, nil] + # @param temperature [Float, nil] + # @param top_p [Float, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to @@ -249,9 +223,8 @@ module Model variant const: -> { OpenAI::Models::CompletionCreateParams::Model::BABBAGE_002 } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -279,9 +252,8 @@ module Prompt variant -> { OpenAI::Models::CompletionCreateParams::Prompt::ArrayOfToken2DArray } - # @!parse - # # @return [Array(String, Array, Array, Array>)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array, Array, Array>)] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -301,9 +273,8 @@ module Stop variant -> { OpenAI::Models::CompletionCreateParams::Stop::StringArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] end diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index 0f098720..d8e75136 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -41,27 +41,14 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails] # attr_writer :prompt_tokens_details - # @!parse - # # Usage statistics for the completion request. 
- # # - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] - # # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] - # # - # def initialize( - # completion_tokens:, - # prompt_tokens:, - # total_tokens:, - # completion_tokens_details: nil, - # prompt_tokens_details: nil, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) + # Usage statistics for the completion request. + # + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] + # @param completion_tokens_details [OpenAI::Models::CompletionUsage::CompletionTokensDetails] + # @param prompt_tokens_details [OpenAI::Models::CompletionUsage::PromptTokensDetails] # @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel @@ -109,25 +96,13 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :rejected_prediction_tokens - # @!parse - # # Breakdown of tokens used in a completion. - # # - # # @param accepted_prediction_tokens [Integer] - # # @param audio_tokens [Integer] - # # @param reasoning_tokens [Integer] - # # @param rejected_prediction_tokens [Integer] - # # - # def initialize( - # accepted_prediction_tokens: nil, - # audio_tokens: nil, - # reasoning_tokens: nil, - # rejected_prediction_tokens: nil, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) + # Breakdown of tokens used in a completion. + # + # @param accepted_prediction_tokens [Integer] + # @param audio_tokens [Integer] + # @param reasoning_tokens [Integer] + # @param rejected_prediction_tokens [Integer] end # @see OpenAI::Models::CompletionUsage#prompt_tokens_details @@ -152,15 +127,11 @@ class PromptTokensDetails < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :cached_tokens - # @!parse - # # Breakdown of tokens used in the prompt. - # # - # # @param audio_tokens [Integer] - # # @param cached_tokens [Integer] - # # - # def initialize(audio_tokens: nil, cached_tokens: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(audio_tokens: nil, cached_tokens: nil) + # Breakdown of tokens used in the prompt. + # + # @param audio_tokens [Integer] + # @param cached_tokens [Integer] end end end diff --git a/lib/openai/models/compound_filter.rb b/lib/openai/models/compound_filter.rb index 57314aa0..228c5572 100644 --- a/lib/openai/models/compound_filter.rb +++ b/lib/openai/models/compound_filter.rb @@ -16,15 +16,11 @@ class CompoundFilter < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::CompoundFilter::Type] required :type, enum: -> { OpenAI::Models::CompoundFilter::Type } - # @!parse - # # Combine multiple filters using `and` or `or`. 
- # # - # # @param filters [Array] - # # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] - # # - # def initialize(filters:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(filters:, type:) + # Combine multiple filters using `and` or `or`. + # + # @param filters [Array] + # @param type [Symbol, OpenAI::Models::CompoundFilter::Type] # A filter used to compare a specified attribute key to a given value using a # defined comparison operation. @@ -36,9 +32,8 @@ module Filter variant OpenAI::Internal::Type::Unknown - # @!parse - # # @return [Array(OpenAI::Models::ComparisonFilter, Object)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ComparisonFilter, Object)] end # Type of operation: `and` or `or`. @@ -50,11 +45,8 @@ module Type AND = :and OR = :or - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/create_embedding_response.rb b/lib/openai/models/create_embedding_response.rb index bde0fa74..35e2aa45 100644 --- a/lib/openai/models/create_embedding_response.rb +++ b/lib/openai/models/create_embedding_response.rb @@ -28,15 +28,11 @@ class CreateEmbeddingResponse < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::CreateEmbeddingResponse::Usage] required :usage, -> { OpenAI::Models::CreateEmbeddingResponse::Usage } - # @!parse - # # @param data [Array] - # # @param model [String] - # # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] - # # @param object [Symbol, :list] - # # - # def initialize(data:, model:, usage:, object: :list, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, model:, usage:, object: :list) + # @param data [Array] + # @param model [String] + # @param usage [OpenAI::Models::CreateEmbeddingResponse::Usage] + # @param object [Symbol, :list] # @see OpenAI::Models::CreateEmbeddingResponse#usage class Usage < OpenAI::Internal::Type::BaseModel @@ -52,15 +48,11 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # The usage information for the request. - # # - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(prompt_tokens:, total_tokens:) + # The usage information for the request. + # + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/embedding.rb b/lib/openai/models/embedding.rb index 75d1aebf..e1e28ded 100644 --- a/lib/openai/models/embedding.rb +++ b/lib/openai/models/embedding.rb @@ -23,16 +23,12 @@ class Embedding < OpenAI::Internal::Type::BaseModel # @return [Symbol, :embedding] required :object, const: :embedding - # @!parse - # # Represents an embedding vector returned by embedding endpoint. - # # - # # @param embedding [Array] - # # @param index [Integer] - # # @param object [Symbol, :embedding] - # # - # def initialize(embedding:, index:, object: :embedding, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(embedding:, index:, object: :embedding) + # Represents an embedding vector returned by embedding endpoint. 
+ # + # @param embedding [Array] + # @param index [Integer] + # @param object [Symbol, :embedding] end end end diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 3f26541e..2586d07f 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -65,17 +65,13 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param input [String, Array, Array, Array>] - # # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] - # # @param dimensions [Integer] - # # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) + # @param input [String, Array, Array, Array>] + # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] + # @param dimensions [Integer] + # @param encoding_format [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Input text to embed, encoded as a string or array of tokens. To embed multiple # inputs in a single request, pass an array of strings or array of token arrays. @@ -100,9 +96,8 @@ module Input # The array of arrays containing integers that will be turned into an embedding. variant -> { OpenAI::Models::EmbeddingCreateParams::Input::ArrayOfToken2DArray } - # @!parse - # # @return [Array(String, Array, Array, Array>)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array, Array, Array>)] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -124,9 +119,8 @@ module Model # ID of the model to use. You can use the [List models](https://platform.openai.com/docs/api-reference/models/list) API to see all of your available models, or see our [Model overview](https://platform.openai.com/docs/models) for descriptions of them. variant enum: -> { OpenAI::Models::EmbeddingModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::EmbeddingModel)] end # The format to return the embeddings in. Can be either `float` or @@ -137,11 +131,8 @@ module EncodingFormat FLOAT = :float BASE64 = :base64 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/embedding_model.rb b/lib/openai/models/embedding_model.rb index 33ab9d7f..0692fbb1 100644 --- a/lib/openai/models/embedding_model.rb +++ b/lib/openai/models/embedding_model.rb @@ -9,11 +9,8 @@ module EmbeddingModel TEXT_EMBEDDING_3_SMALL = :"text-embedding-3-small" TEXT_EMBEDDING_3_LARGE = :"text-embedding-3-large" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/error_object.rb b/lib/openai/models/error_object.rb index 37031a55..b33d574a 100644 --- a/lib/openai/models/error_object.rb +++ b/lib/openai/models/error_object.rb @@ -23,15 +23,11 @@ class ErrorObject < OpenAI::Internal::Type::BaseModel # @return [String] required :type, String - # @!parse - # # @param code [String, nil] - # # @param message [String] - # # @param param [String, nil] - # # @param type [String] - # # - # def initialize(code:, message:, param:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:, param:, type:) + # @param code [String, nil] + # @param message [String] + # @param param [String, nil] + # @param type [String] end end end diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index dc2ef4ac..d66fae28 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -52,27 +52,13 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :share_with_openai - # @!parse - # # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] - # # @param testing_criteria [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # data_source_config:, - # testing_criteria:, - # metadata: nil, - # name: nil, - # share_with_openai: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data_source_config:, testing_criteria:, metadata: nil, name: nil, share_with_openai: nil, request_options: {}) + # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] + # @param testing_criteria [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The configuration for the data source used for the evaluation runs. module DataSourceConfig @@ -113,21 +99,17 @@ class Custom < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :include_sample_schema - # @!parse - # # A CustomDataSourceConfig object that defines the schema for the data source used - # # for the evaluation runs. This schema is used to define the shape of the data - # # that will be: - # # - # # - Used to define your testing criteria and - # # - What data is required when creating a run - # # - # # @param item_schema [Hash{Symbol=>Object}] - # # @param include_sample_schema [Boolean] - # # @param type [Symbol, :custom] - # # - # def initialize(item_schema:, include_sample_schema: nil, type: :custom, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom) + # A CustomDataSourceConfig object that defines the schema for the data source used + # for the evaluation runs. 
This schema is used to define the shape of the data + # that will be: + # + # - Used to define your testing criteria and + # - What data is required when creating a run + # + # @param item_schema [Hash{Symbol=>Object}] + # @param include_sample_schema [Boolean] + # @param type [Symbol, :custom] end class StoredCompletions < OpenAI::Internal::Type::BaseModel @@ -148,22 +130,17 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # A data source config which specifies the metadata property of your stored - # # completions query. This is usually metadata like `usecase=chatbot` or - # # `prompt-version=v2`, etc. - # # - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param type [Symbol, :stored_completions] - # # - # def initialize(metadata: nil, type: :stored_completions, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata: nil, type: :stored_completions) + # A data source config which specifies the metadata property of your stored + # completions query. This is usually metadata like `usecase=chatbot` or + # `prompt-version=v2`, etc. + # + # @param metadata [Hash{Symbol=>String}, nil] + # @param type [Symbol, :stored_completions] end - # @!parse - # # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -220,20 +197,16 @@ class LabelModel < OpenAI::Internal::Type::BaseModel # @return [Symbol, :label_model] required :type, const: :label_model - # @!parse - # # A LabelModelGrader object which uses a model to assign labels to each item in - # # the evaluation. - # # - # # @param input [Array] - # # @param labels [Array] - # # @param model [String] - # # @param name [String] - # # @param passing_labels [Array] - # # @param type [Symbol, :label_model] - # # - # def initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. 
+ # + # @param input [Array] + # @param labels [Array] + # @param model [String] + # @param name [String] + # @param passing_labels [Array] + # @param type [Symbol, :label_model] module Input extend OpenAI::Internal::Type::Union @@ -257,13 +230,9 @@ class SimpleInputMessage < OpenAI::Internal::Type::BaseModel # @return [String] required :role, String - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content:, role:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] end class InputMessage < OpenAI::Internal::Type::BaseModel @@ -287,14 +256,10 @@ class InputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content] - # # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content] + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Role] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Type] # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -311,13 +276,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage::Content::Type] # The type of content, which is always `input_text`. # @@ -327,11 +288,8 @@ module Type INPUT_TEXT = :input_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -345,11 +303,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -360,11 +315,8 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -389,14 +341,10 @@ class OutputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content] - # # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content] + # @param role [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Role] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Type] # @see OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -413,13 +361,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage::Content::Type] # The type of content, which is always `output_text`. # @@ -429,11 +373,8 @@ module Type OUTPUT_TEXT = :output_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -445,11 +386,8 @@ module Role ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -460,23 +398,18 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end - # @!parse - # # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::SimpleInputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::InputMessage, OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel::Input::OutputMessage)] end end - # @!parse - # # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCreateParams::TestingCriterion::LabelModel, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/eval_create_response.rb b/lib/openai/models/eval_create_response.rb index f5e1cb70..12832a04 100644 --- a/lib/openai/models/eval_create_response.rb +++ b/lib/openai/models/eval_create_response.rb @@ -58,38 +58,22 @@ class EvalCreateResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalCreateResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. Like: - # # - # # - Improve the quality of my chatbot - # # - See how well my chatbot handles customer support - # # - Check if o3-mini is better at my usecase than gpt-4o - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param testing_criteria [Array] - # # @param object [Symbol, :eval] - # # - # def initialize( - # id:, - # created_at:, - # data_source_config:, - # metadata:, - # name:, - # share_with_openai:, - # testing_criteria:, - # object: :eval, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + # + # @param id [String] + # @param created_at [Integer] + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param testing_criteria [Array] + # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. 
# @@ -111,9 +95,8 @@ module DataSourceConfig # `item` and `sample` are both defined when using this data source config. variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } - # @!parse - # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -133,9 +116,8 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/eval_custom_data_source_config.rb b/lib/openai/models/eval_custom_data_source_config.rb index 5be74c5c..04e45803 100644 --- a/lib/openai/models/eval_custom_data_source_config.rb +++ b/lib/openai/models/eval_custom_data_source_config.rb @@ -16,20 +16,16 @@ class EvalCustomDataSourceConfig < OpenAI::Internal::Type::BaseModel # @return [Symbol, :custom] required :type, const: :custom - # @!parse - # # A CustomDataSourceConfig which specifies the schema of your `item` and - # # optionally `sample` namespaces. The response schema defines the shape of the - # # data that will be: - # # - # # - Used to define your testing criteria and - # # - What data is required when creating a run - # # - # # @param schema [Hash{Symbol=>Object}] - # # @param type [Symbol, :custom] - # # - # def initialize(schema:, type: :custom, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(schema:, type: :custom) + # A CustomDataSourceConfig which specifies the schema of your `item` and + # optionally `sample` namespaces. 
The response schema defines the shape of the + # data that will be: + # + # - Used to define your testing criteria and + # - What data is required when creating a run + # + # @param schema [Hash{Symbol=>Object}] + # @param type [Symbol, :custom] end end end diff --git a/lib/openai/models/eval_delete_params.rb b/lib/openai/models/eval_delete_params.rb index de02e69c..80e4d81d 100644 --- a/lib/openai/models/eval_delete_params.rb +++ b/lib/openai/models/eval_delete_params.rb @@ -8,12 +8,8 @@ class EvalDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/eval_delete_response.rb b/lib/openai/models/eval_delete_response.rb index a60ec9d1..5495ca3d 100644 --- a/lib/openai/models/eval_delete_response.rb +++ b/lib/openai/models/eval_delete_response.rb @@ -19,14 +19,10 @@ class EvalDeleteResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :object, String - # @!parse - # # @param deleted [Boolean] - # # @param eval_id [String] - # # @param object [String] - # # - # def initialize(deleted:, eval_id:, object:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(deleted:, eval_id:, object:) + # @param deleted [Boolean] + # @param eval_id [String] + # @param object [String] end end end diff --git a/lib/openai/models/eval_label_model_grader.rb b/lib/openai/models/eval_label_model_grader.rb index 1455f6e4..2318e553 100644 --- a/lib/openai/models/eval_label_model_grader.rb +++ b/lib/openai/models/eval_label_model_grader.rb @@ -39,20 +39,16 @@ class EvalLabelModelGrader < OpenAI::Internal::Type::BaseModel # @return [Symbol, :label_model] required :type, const: :label_model - # @!parse - # # A LabelModelGrader object which uses a model to assign labels to each item in - # # the evaluation. - # # - # # @param input [Array] - # # @param labels [Array] - # # @param model [String] - # # @param name [String] - # # @param passing_labels [Array] - # # @param type [Symbol, :label_model] - # # - # def initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, labels:, model:, name:, passing_labels:, type: :label_model) + # A LabelModelGrader object which uses a model to assign labels to each item in + # the evaluation. + # + # @param input [Array] + # @param labels [Array] + # @param model [String] + # @param name [String] + # @param passing_labels [Array] + # @param type [Symbol, :label_model] # An item can either be an input message or an output message. 
module Input @@ -82,14 +78,10 @@ class InputMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content] - # # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content] + # @param role [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Role] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Type] # @see OpenAI::Models::EvalLabelModelGrader::Input::InputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -105,13 +97,9 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage::Content::Type] # The type of content, which is always `input_text`. # @@ -121,11 +109,8 @@ module Type INPUT_TEXT = :input_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -139,11 +124,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -154,11 +136,8 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -180,14 +159,10 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type } - # @!parse - # # @param content [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] - # # @param role [Symbol, :assistant] - # # - # def initialize(content:, type:, role: :assistant, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type:, role: :assistant) + # @param content [OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Type] + # @param role [Symbol, :assistant] # @see OpenAI::Models::EvalLabelModelGrader::Input::Assistant#content class Content < OpenAI::Internal::Type::BaseModel @@ -203,13 +178,9 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] required :type, enum: -> { OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::EvalLabelModelGrader::Input::Assistant::Content::Type] # The type of content, which is always `output_text`. # @@ -219,11 +190,8 @@ module Type OUTPUT_TEXT = :output_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -235,17 +203,13 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader::Input::Assistant, OpenAI::Models::EvalLabelModelGrader::Input::InputMessage)] end end end diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb index cfee50cd..e0d2fd84 100644 --- a/lib/openai/models/eval_list_params.rb +++ b/lib/openai/models/eval_list_params.rb @@ -50,16 +50,12 @@ class EvalListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy] # attr_writer :order_by - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::EvalListParams::Order] - # # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::EvalListParams::Order] + # @param order_by [Symbol, OpenAI::Models::EvalListParams::OrderBy] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for # descending order. @@ -69,11 +65,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Evals can be ordered by creation time or last updated time. Use `created_at` for @@ -84,11 +77,8 @@ module OrderBy CREATED_AT = :created_at UPDATED_AT = :updated_at - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/eval_list_response.rb b/lib/openai/models/eval_list_response.rb index e91b7773..0abeed04 100644 --- a/lib/openai/models/eval_list_response.rb +++ b/lib/openai/models/eval_list_response.rb @@ -58,38 +58,22 @@ class EvalListResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalListResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. 
Like: - # # - # # - Improve the quality of my chatbot - # # - See how well my chatbot handles customer support - # # - Check if o3-mini is better at my usecase than gpt-4o - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param testing_criteria [Array] - # # @param object [Symbol, :eval] - # # - # def initialize( - # id:, - # created_at:, - # data_source_config:, - # metadata:, - # name:, - # share_with_openai:, - # testing_criteria:, - # object: :eval, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + # + # @param id [String] + # @param created_at [Integer] + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param testing_criteria [Array] + # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. # @@ -111,9 +95,8 @@ module DataSourceConfig # `item` and `sample` are both defined when using this data source config. variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } - # @!parse - # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -133,9 +116,8 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. 
variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/eval_retrieve_params.rb b/lib/openai/models/eval_retrieve_params.rb index 06e448ac..e06ffbe4 100644 --- a/lib/openai/models/eval_retrieve_params.rb +++ b/lib/openai/models/eval_retrieve_params.rb @@ -8,12 +8,8 @@ class EvalRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/eval_retrieve_response.rb b/lib/openai/models/eval_retrieve_response.rb index 8b12f5ed..b1d14b18 100644 --- a/lib/openai/models/eval_retrieve_response.rb +++ b/lib/openai/models/eval_retrieve_response.rb @@ -58,38 +58,22 @@ class EvalRetrieveResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalRetrieveResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. Like: - # # - # # - Improve the quality of my chatbot - # # - See how well my chatbot handles customer support - # # - Check if o3-mini is better at my usecase than gpt-4o - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param testing_criteria [Array] - # # @param object [Symbol, :eval] - # # - # def initialize( - # id:, - # created_at:, - # data_source_config:, - # metadata:, - # name:, - # share_with_openai:, - # testing_criteria:, - # object: :eval, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + # + # @param id [String] + # @param created_at [Integer] + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param testing_criteria [Array] + # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. 
# @@ -111,9 +95,8 @@ module DataSourceConfig # `item` and `sample` are both defined when using this data source config. variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } - # @!parse - # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -133,9 +116,8 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/eval_stored_completions_data_source_config.rb b/lib/openai/models/eval_stored_completions_data_source_config.rb index 54752125..53940b3e 100644 --- a/lib/openai/models/eval_stored_completions_data_source_config.rb +++ b/lib/openai/models/eval_stored_completions_data_source_config.rb @@ -27,20 +27,16 @@ class EvalStoredCompletionsDataSourceConfig < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # A StoredCompletionsDataSourceConfig which specifies the metadata property of - # # your stored completions query. This is usually metadata like `usecase=chatbot` - # # or `prompt-version=v2`, etc. The schema returned by this data source config is - # # used to defined what variables are available in your evals. `item` and `sample` - # # are both defined when using this data source config. - # # - # # @param schema [Hash{Symbol=>Object}] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param type [Symbol, :stored_completions] - # # - # def initialize(schema:, metadata: nil, type: :stored_completions, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(schema:, metadata: nil, type: :stored_completions) + # A StoredCompletionsDataSourceConfig which specifies the metadata property of + # your stored completions query. This is usually metadata like `usecase=chatbot` + # or `prompt-version=v2`, etc. The schema returned by this data source config is + # used to defined what variables are available in your evals. `item` and `sample` + # are both defined when using this data source config. + # + # @param schema [Hash{Symbol=>Object}] + # @param metadata [Hash{Symbol=>String}, nil] + # @param type [Symbol, :stored_completions] end end end diff --git a/lib/openai/models/eval_string_check_grader.rb b/lib/openai/models/eval_string_check_grader.rb index 57192594..421bb059 100644 --- a/lib/openai/models/eval_string_check_grader.rb +++ b/lib/openai/models/eval_string_check_grader.rb @@ -33,19 +33,15 @@ class EvalStringCheckGrader < OpenAI::Internal::Type::BaseModel # @return [Symbol, :string_check] required :type, const: :string_check - # @!parse - # # A StringCheckGrader object that performs a string comparison between input and - # # reference using a specified operation. 
- # # - # # @param input [String] - # # @param name [String] - # # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] - # # @param reference [String] - # # @param type [Symbol, :string_check] - # # - # def initialize(input:, name:, operation:, reference:, type: :string_check, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, name:, operation:, reference:, type: :string_check) + # A StringCheckGrader object that performs a string comparison between input and + # reference using a specified operation. + # + # @param input [String] + # @param name [String] + # @param operation [Symbol, OpenAI::Models::EvalStringCheckGrader::Operation] + # @param reference [String] + # @param type [Symbol, :string_check] # The string check operation to perform. One of `eq`, `ne`, `like`, or `ilike`. # @@ -58,11 +54,8 @@ module Operation LIKE = :like ILIKE = :ilike - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb index 4362ad72..9ff351b4 100644 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -44,19 +44,15 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # A TextSimilarityGrader object which grades text based on similarity metrics. - # # - # # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] - # # @param input [String] - # # @param pass_threshold [Float] - # # @param reference [String] - # # @param name [String] - # # @param type [Symbol, :text_similarity] - # # - # def initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity) + # A TextSimilarityGrader object which grades text based on similarity metrics. + # + # @param evaluation_metric [Symbol, OpenAI::Models::EvalTextSimilarityGrader::EvaluationMetric] + # @param input [String] + # @param pass_threshold [Float] + # @param reference [String] + # @param name [String] + # @param type [Symbol, :text_similarity] # The evaluation metric to use. One of `cosine`, `fuzzy_match`, `bleu`, `gleu`, # `meteor`, `rouge_1`, `rouge_2`, `rouge_3`, `rouge_4`, `rouge_5`, or `rouge_l`. @@ -77,11 +73,8 @@ module EvaluationMetric ROUGE_L = :rouge_l COSINE = :cosine - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/eval_update_params.rb b/lib/openai/models/eval_update_params.rb index babe7583..8572bf39 100644 --- a/lib/openai/models/eval_update_params.rb +++ b/lib/openai/models/eval_update_params.rb @@ -29,14 +29,10 @@ class EvalUpdateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(metadata: nil, name: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(metadata: nil, name: nil, request_options: {}) + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/eval_update_response.rb b/lib/openai/models/eval_update_response.rb index b80950ed..08e01385 100644 --- a/lib/openai/models/eval_update_response.rb +++ b/lib/openai/models/eval_update_response.rb @@ -58,38 +58,22 @@ class EvalUpdateResponse < OpenAI::Internal::Type::BaseModel required :testing_criteria, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::EvalUpdateResponse::TestingCriterion] } - # @!parse - # # An Eval object with a data source config and testing criteria. An Eval - # # represents a task to be done for your LLM integration. Like: - # # - # # - Improve the quality of my chatbot - # # - See how well my chatbot handles customer support - # # - Check if o3-mini is better at my usecase than gpt-4o - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param share_with_openai [Boolean] - # # @param testing_criteria [Array] - # # @param object [Symbol, :eval] - # # - # def initialize( - # id:, - # created_at:, - # data_source_config:, - # metadata:, - # name:, - # share_with_openai:, - # testing_criteria:, - # object: :eval, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source_config:, metadata:, name:, share_with_openai:, testing_criteria:, object: :eval) + # An Eval object with a data source config and testing criteria. An Eval + # represents a task to be done for your LLM integration. Like: + # + # - Improve the quality of my chatbot + # - See how well my chatbot handles customer support + # - Check if o3-mini is better at my usecase than gpt-4o + # + # @param id [String] + # @param created_at [Integer] + # @param data_source_config [OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param share_with_openai [Boolean] + # @param testing_criteria [Array] + # @param object [Symbol, :eval] # Configuration of data sources used in runs of the evaluation. # @@ -111,9 +95,8 @@ module DataSourceConfig # `item` and `sample` are both defined when using this data source config. 
variant :stored_completions, -> { OpenAI::Models::EvalStoredCompletionsDataSourceConfig } - # @!parse - # # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalCustomDataSourceConfig, OpenAI::Models::EvalStoredCompletionsDataSourceConfig)] end # A LabelModelGrader object which uses a model to assign labels to each item in @@ -133,9 +116,8 @@ module TestingCriterion # A TextSimilarityGrader object which grades text based on similarity metrics. variant :text_similarity, -> { OpenAI::Models::EvalTextSimilarityGrader } - # @!parse - # # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::EvalLabelModelGrader, OpenAI::Models::EvalStringCheckGrader, OpenAI::Models::EvalTextSimilarityGrader)] end end end diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 212bc40a..56b9b732 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -38,18 +38,14 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] # attr_writer :sampling_params - # @!parse - # # A CompletionsRunDataSource object describing a model sampling configuration. - # # - # # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] - # # @param model [String] - # # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] - # # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] - # # - # def initialize(input_messages:, model:, source:, type:, sampling_params: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input_messages:, model:, source:, type:, sampling_params: nil) + # A CompletionsRunDataSource object describing a model sampling configuration. 
+ # + # @param input_messages [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference] + # @param model [String] + # @param source [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] + # @param sampling_params [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#input_messages module InputMessages @@ -78,13 +74,9 @@ class Template < OpenAI::Internal::Type::BaseModel # @return [Symbol, :template] required :type, const: :template - # @!parse - # # @param template [Array] - # # @param type [Symbol, :template] - # # - # def initialize(template:, type: :template, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(template:, type: :template) + # @param template [Array] + # @param type [Symbol, :template] module Template extend OpenAI::Internal::Type::Union @@ -108,13 +100,9 @@ class ChatMessage < OpenAI::Internal::Type::BaseModel # @return [String] required :role, String - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content:, role:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:) + # @param content [String] + # @param role [String] end class InputMessage < OpenAI::Internal::Type::BaseModel @@ -138,14 +126,10 @@ class InputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] - # # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content] + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Role] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Type] # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -162,13 +146,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, 
OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage::Content::Type] # The type of content, which is always `input_text`. # @@ -178,11 +158,8 @@ module Type INPUT_TEXT = :input_text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -196,11 +173,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -211,11 +185,8 @@ module Type MESSAGE = :message - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -240,14 +211,10 @@ class OutputMessage < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type } - # @!parse - # # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] - # # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] - # # - # def initialize(content:, role:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type:) + # @param content [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content] + # @param role [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Role] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Type] # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage#content class Content < OpenAI::Internal::Type::BaseModel @@ -264,13 +231,9 @@ class Content < OpenAI::Internal::Type::BaseModel required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage::Content::Type] # The type of content, which is always `output_text`. # @@ -280,11 +243,8 @@ module Type OUTPUT_TEXT = :output_text - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -296,11 +256,8 @@ module Role ASSISTANT = :assistant - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of item, which is always `message`. @@ -311,17 +268,13 @@ module Type MESSAGE = :message - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::ChatMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::InputMessage, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template::Template::OutputMessage)] end end @@ -338,18 +291,13 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @return [Symbol, :item_reference] required :type, const: :item_reference - # @!parse - # # @param item_reference [String] - # # @param type [Symbol, :item_reference] - # # - # def initialize(item_reference:, type: :item_reference, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_reference:, type: :item_reference) + # @param item_reference [String] + # @param type [Symbol, :item_reference] end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::Template, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::InputMessages::ItemReference)] end # A StoredCompletionsRunDataSource configuration describing a set of filters @@ -383,13 +331,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_content] required :type, const: :file_content - # @!parse - # # @param content [Array] - # # @param type [Symbol, :file_content] - # # - # def initialize(content:, type: :file_content, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -406,13 +350,9 @@ class Content < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>Object}] # attr_writer :sample - # @!parse - # # @param item [Hash{Symbol=>Object}] - # # @param sample [Hash{Symbol=>Object}] - # # - # def initialize(item:, sample: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] end end @@ -429,13 +369,9 @@ class FileID < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_id] required :type, const: :file_id - # @!parse - # # 
@param id [String] - # # @param type [Symbol, :file_id] - # # - # def initialize(id:, type: :file_id, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] end class StoredCompletions < OpenAI::Internal::Type::BaseModel @@ -480,24 +416,19 @@ class StoredCompletions < OpenAI::Internal::Type::BaseModel # @return [Symbol, :stored_completions] required :type, const: :stored_completions - # @!parse - # # A StoredCompletionsRunDataSource configuration describing a set of filters - # # - # # @param created_after [Integer, nil] - # # @param created_before [Integer, nil] - # # @param limit [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, nil] - # # @param type [Symbol, :stored_completions] - # # - # def initialize(created_after:, created_before:, limit:, metadata:, model:, type: :stored_completions, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(created_after:, created_before:, limit:, metadata:, model:, type: :stored_completions) + # A StoredCompletionsRunDataSource configuration describing a set of filters + # + # @param created_after [Integer, nil] + # @param created_before [Integer, nil] + # @param limit [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, nil] + # @param type [Symbol, :stored_completions] end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::FileID, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Source::StoredCompletions)] end # The type of run data source. Always `completions`. @@ -508,11 +439,8 @@ module Type COMPLETIONS = :completions - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params @@ -557,15 +485,11 @@ class SamplingParams < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :top_p - # @!parse - # # @param max_completion_tokens [Integer] - # # @param seed [Integer] - # # @param temperature [Float] - # # @param top_p [Float] - # # - # def initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) + # @param max_completion_tokens [Integer] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] end end end diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index a3e22ebb..3b06b922 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -15,16 +15,12 @@ class CreateEvalJSONLRunDataSource < OpenAI::Internal::Type::BaseModel # @return [Symbol, :jsonl] required :type, const: :jsonl - # @!parse - # # A JsonlRunDataSource object with that specifies a JSONL file that matches the - # # eval - # # - # # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] - # # @param type [Symbol, :jsonl] - # # - # def initialize(source:, type: :jsonl, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(source:, type: :jsonl) + # A JsonlRunDataSource object with that specifies a JSONL file that matches the + # eval + # + # @param source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID] + # @param type [Symbol, :jsonl] # @see OpenAI::Models::Evals::CreateEvalJSONLRunDataSource#source module Source @@ -50,13 +46,9 @@ class FileContent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_content] required :type, const: :file_content - # @!parse - # # @param content [Array] - # # @param type [Symbol, :file_content] - # # - # def initialize(content:, type: :file_content, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, type: :file_content) + # @param content [Array] + # @param type [Symbol, :file_content] class Content < OpenAI::Internal::Type::BaseModel # @!attribute item @@ -73,13 +65,9 @@ class Content < OpenAI::Internal::Type::BaseModel # # @return [Hash{Symbol=>Object}] # attr_writer :sample - # @!parse - # # @param item [Hash{Symbol=>Object}] - # # @param sample [Hash{Symbol=>Object}] - # # - # def initialize(item:, sample: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, sample: nil) + # @param item [Hash{Symbol=>Object}] + # @param sample [Hash{Symbol=>Object}] end end @@ -96,18 +84,13 @@ class FileID < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_id] required :type, const: :file_id - # @!parse - # # @param id [String] - # # @param type [Symbol, :file_id] - # # - # def initialize(id:, type: :file_id, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> 
void + # @!method initialize(id:, type: :file_id) + # @param id [String] + # @param type [Symbol, :file_id] end - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileContent, OpenAI::Models::Evals::CreateEvalJSONLRunDataSource::Source::FileID)] end end end diff --git a/lib/openai/models/evals/eval_api_error.rb b/lib/openai/models/evals/eval_api_error.rb index 11b56e24..4d88b8dc 100644 --- a/lib/openai/models/evals/eval_api_error.rb +++ b/lib/openai/models/evals/eval_api_error.rb @@ -16,15 +16,11 @@ class EvalAPIError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # An object representing an error response from the Eval API. - # # - # # @param code [String] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # An object representing an error response from the Eval API. + # + # @param code [String] + # @param message [String] end end diff --git a/lib/openai/models/evals/run_cancel_params.rb b/lib/openai/models/evals/run_cancel_params.rb index b492403f..958dad3b 100644 --- a/lib/openai/models/evals/run_cancel_params.rb +++ b/lib/openai/models/evals/run_cancel_params.rb @@ -14,13 +14,9 @@ class RunCancelParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!parse - # # @param eval_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, request_options: {}) + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/run_cancel_response.rb b/lib/openai/models/evals/run_cancel_response.rb index 2121c8f8..19775cff 100644 --- a/lib/openai/models/evals/run_cancel_response.rb +++ b/lib/openai/models/evals/run_cancel_response.rb @@ -96,45 +96,23 @@ class RunCancelResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunCancelResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunCancelResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index c9ccea28..3e03a25c 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -36,15 +36,11 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(data_source:, metadata: nil, name: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data_source:, metadata: nil, name: nil, request_options: {}) + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Details about the run's data source. module DataSource @@ -56,9 +52,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. variant -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end end end diff --git a/lib/openai/models/evals/run_create_response.rb b/lib/openai/models/evals/run_create_response.rb index d890f4c5..25892cb6 100644 --- a/lib/openai/models/evals/run_create_response.rb +++ b/lib/openai/models/evals/run_create_response.rb @@ -96,45 +96,23 @@ class RunCreateResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunCreateResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunCreateResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/run_delete_params.rb b/lib/openai/models/evals/run_delete_params.rb index 71cdb827..887478b6 100644 --- a/lib/openai/models/evals/run_delete_params.rb +++ b/lib/openai/models/evals/run_delete_params.rb @@ -14,13 +14,9 @@ class RunDeleteParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!parse - # # @param eval_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, request_options: {}) + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/run_delete_response.rb b/lib/openai/models/evals/run_delete_response.rb index 68412255..bd1154dd 100644 --- a/lib/openai/models/evals/run_delete_response.rb +++ b/lib/openai/models/evals/run_delete_response.rb @@ -32,14 +32,10 @@ class RunDeleteResponse < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :run_id - # @!parse - # # @param deleted [Boolean] - # # @param object [String] - # # @param run_id [String] - # # - # def initialize(deleted: nil, object: nil, run_id: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(deleted: nil, object: nil, run_id: nil) + # @param deleted [Boolean] + # @param object [String] + # @param run_id [String] end end end diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index 6dfe1fc2..b5c500f7 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -51,16 +51,12 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status] # attr_writer :status - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] - # # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Evals::RunListParams::Order] + # @param status [Symbol, OpenAI::Models::Evals::RunListParams::Status] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. @@ -70,11 +66,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | @@ -88,11 +81,8 @@ module Status CANCELED = :canceled FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/evals/run_list_response.rb b/lib/openai/models/evals/run_list_response.rb index b31795e0..cda98be8 100644 --- a/lib/openai/models/evals/run_list_response.rb +++ b/lib/openai/models/evals/run_list_response.rb @@ -96,45 +96,23 @@ class RunListResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunListResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunListResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunListResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/run_retrieve_params.rb b/lib/openai/models/evals/run_retrieve_params.rb index f17f16a7..648fa819 100644 --- a/lib/openai/models/evals/run_retrieve_params.rb +++ b/lib/openai/models/evals/run_retrieve_params.rb @@ -14,13 +14,9 @@ class RunRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!parse - # # @param eval_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, request_options: {}) + # @param eval_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/run_retrieve_response.rb b/lib/openai/models/evals/run_retrieve_response.rb index 219fbd4c..dabdd0a5 100644 --- a/lib/openai/models/evals/run_retrieve_response.rb +++ b/lib/openai/models/evals/run_retrieve_response.rb @@ -96,45 +96,23 @@ class RunRetrieveResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param eval_id [String] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String] - # # @param name [String] - # # @param per_model_usage [Array] - # # @param per_testing_criteria_results [Array] - # # @param report_url [String] - # # @param result_counts [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] - # # @param status [String] - # # @param object [Symbol, :"eval.run"] - # # - # def initialize( - # id:, - # created_at:, - # data_source:, - # error:, - # eval_id:, - # metadata:, - # model:, - # name:, - # per_model_usage:, - # per_testing_criteria_results:, - # report_url:, - # result_counts:, - # status:, - # object: :"eval.run", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, data_source:, error:, eval_id:, metadata:, model:, name:, per_model_usage:, per_testing_criteria_results:, report_url:, result_counts:, status:, object: :"eval.run") + # A schema representing an evaluation run. + # + # @param id [String] + # @param created_at [Integer] + # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param eval_id [String] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String] + # @param name [String] + # @param per_model_usage [Array] + # @param per_testing_criteria_results [Array] + # @param report_url [String] + # @param result_counts [OpenAI::Models::Evals::RunRetrieveResponse::ResultCounts] + # @param status [String] + # @param object [Symbol, :"eval.run"] # Information about the run's data source. # @@ -150,9 +128,8 @@ module DataSource # A CompletionsRunDataSource object describing a model sampling configuration. 
variant :completions, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource } - # @!parse - # # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource)] end class PerModelUsage < OpenAI::Internal::Type::BaseModel @@ -192,17 +169,13 @@ class PerModelUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param invocation_count [Integer] - # # @param model_name [String] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, invocation_count:, model_name:, prompt_tokens:, total_tokens:) + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param invocation_count [Integer] + # @param model_name [String] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel @@ -224,14 +197,10 @@ class PerTestingCriteriaResult < OpenAI::Internal::Type::BaseModel # @return [String] required :testing_criteria, String - # @!parse - # # @param failed [Integer] - # # @param passed [Integer] - # # @param testing_criteria [String] - # # - # def initialize(failed:, passed:, testing_criteria:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(failed:, passed:, testing_criteria:) + # @param failed [Integer] + # @param passed [Integer] + # @param testing_criteria [String] end # @see OpenAI::Models::Evals::RunRetrieveResponse#result_counts @@ -260,17 +229,13 @@ class ResultCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # Counters summarizing the outcomes of the evaluation run. - # # - # # @param errored [Integer] - # # @param failed [Integer] - # # @param passed [Integer] - # # @param total [Integer] - # # - # def initialize(errored:, failed:, passed:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(errored:, failed:, passed:, total:) + # Counters summarizing the outcomes of the evaluation run. 
+ # + # @param errored [Integer] + # @param failed [Integer] + # @param passed [Integer] + # @param total [Integer] end end end diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index e4b6424f..dc3ba2dd 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -57,17 +57,13 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] # attr_writer :status - # @!parse - # # @param eval_id [String] - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] - # # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) + # @param eval_id [String] + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] + # @param status [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. @@ -77,11 +73,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Filter output items by status. Use `failed` to filter by failed output items or @@ -92,11 +85,8 @@ module Status FAIL = :fail PASS = :pass - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb index 1b9744e6..fe4db7ad 100644 --- a/lib/openai/models/evals/runs/output_item_list_response.rb +++ b/lib/openai/models/evals/runs/output_item_list_response.rb @@ -67,37 +67,19 @@ class OutputItemListResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run output item. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param datasource_item [Hash{Symbol=>Object}] - # # @param datasource_item_id [Integer] - # # @param eval_id [String] - # # @param results [ArrayObject}>] - # # @param run_id [String] - # # @param sample [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample] - # # @param status [String] - # # @param object [Symbol, :"eval.run.output_item"] - # # - # def initialize( - # id:, - # created_at:, - # datasource_item:, - # datasource_item_id:, - # eval_id:, - # results:, - # run_id:, - # sample:, - # status:, - # object: :"eval.run.output_item", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, datasource_item:, datasource_item_id:, eval_id:, results:, run_id:, sample:, status:, object: :"eval.run.output_item") + # A schema representing an evaluation run output item. + # + # @param id [String] + # @param created_at [Integer] + # @param datasource_item [Hash{Symbol=>Object}] + # @param datasource_item_id [Integer] + # @param eval_id [String] + # @param results [ArrayObject}>] + # @param run_id [String] + # @param sample [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample] + # @param status [String] + # @param object [Symbol, :"eval.run.output_item"] # @see OpenAI::Models::Evals::Runs::OutputItemListResponse#sample class Sample < OpenAI::Internal::Type::BaseModel @@ -163,37 +145,19 @@ class Sample < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage] required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage } - # @!parse - # # A sample containing the input and output of the evaluation run. - # # - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param finish_reason [String] - # # @param input [Array] - # # @param max_completion_tokens [Integer] - # # @param model [String] - # # @param output [Array] - # # @param seed [Integer] - # # @param temperature [Float] - # # @param top_p [Float] - # # @param usage [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage] - # # - # def initialize( - # error:, - # finish_reason:, - # input:, - # max_completion_tokens:, - # model:, - # output:, - # seed:, - # temperature:, - # top_p:, - # usage:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(error:, finish_reason:, input:, max_completion_tokens:, model:, output:, seed:, temperature:, top_p:, usage:) + # A sample containing the input and output of the evaluation run. + # + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param finish_reason [String] + # @param input [Array] + # @param max_completion_tokens [Integer] + # @param model [String] + # @param output [Array] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] + # @param usage [OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample::Usage] class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -208,15 +172,11 @@ class Input < OpenAI::Internal::Type::BaseModel # @return [String] required :role, String - # @!parse - # # An input message. - # # - # # @param content [String] - # # @param role [String] - # # - # def initialize(content:, role:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:) + # An input message. 
+ # + # @param content [String] + # @param role [String] end class Output < OpenAI::Internal::Type::BaseModel @@ -240,13 +200,9 @@ class Output < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :role - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content: nil, role: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, role: nil) + # @param content [String] + # @param role [String] end # @see OpenAI::Models::Evals::Runs::OutputItemListResponse::Sample#usage @@ -275,17 +231,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Token usage details for the sample. - # # - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:) + # Token usage details for the sample. + # + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/evals/runs/output_item_retrieve_params.rb b/lib/openai/models/evals/runs/output_item_retrieve_params.rb index e6154bee..599a0b19 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_params.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_params.rb @@ -20,14 +20,10 @@ class OutputItemRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :run_id, String - # @!parse - # # @param eval_id [String] - # # @param run_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(eval_id:, run_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(eval_id:, run_id:, request_options: {}) + # @param eval_id [String] + # @param run_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb index 7c7798fb..bf311b56 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb @@ -67,37 +67,19 @@ class OutputItemRetrieveResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :status, String - # @!parse - # # A schema representing an evaluation run output item. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param datasource_item [Hash{Symbol=>Object}] - # # @param datasource_item_id [Integer] - # # @param eval_id [String] - # # @param results [ArrayObject}>] - # # @param run_id [String] - # # @param sample [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample] - # # @param status [String] - # # @param object [Symbol, :"eval.run.output_item"] - # # - # def initialize( - # id:, - # created_at:, - # datasource_item:, - # datasource_item_id:, - # eval_id:, - # results:, - # run_id:, - # sample:, - # status:, - # object: :"eval.run.output_item", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, datasource_item:, datasource_item_id:, eval_id:, results:, run_id:, sample:, status:, object: :"eval.run.output_item") + # A schema representing an evaluation run output item. + # + # @param id [String] + # @param created_at [Integer] + # @param datasource_item [Hash{Symbol=>Object}] + # @param datasource_item_id [Integer] + # @param eval_id [String] + # @param results [ArrayObject}>] + # @param run_id [String] + # @param sample [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample] + # @param status [String] + # @param object [Symbol, :"eval.run.output_item"] # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse#sample class Sample < OpenAI::Internal::Type::BaseModel @@ -163,37 +145,19 @@ class Sample < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage] required :usage, -> { OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage } - # @!parse - # # A sample containing the input and output of the evaluation run. - # # - # # @param error [OpenAI::Models::Evals::EvalAPIError] - # # @param finish_reason [String] - # # @param input [Array] - # # @param max_completion_tokens [Integer] - # # @param model [String] - # # @param output [Array] - # # @param seed [Integer] - # # @param temperature [Float] - # # @param top_p [Float] - # # @param usage [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage] - # # - # def initialize( - # error:, - # finish_reason:, - # input:, - # max_completion_tokens:, - # model:, - # output:, - # seed:, - # temperature:, - # top_p:, - # usage:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(error:, finish_reason:, input:, max_completion_tokens:, model:, output:, seed:, temperature:, top_p:, usage:) + # A sample containing the input and output of the evaluation run. + # + # @param error [OpenAI::Models::Evals::EvalAPIError] + # @param finish_reason [String] + # @param input [Array] + # @param max_completion_tokens [Integer] + # @param model [String] + # @param output [Array] + # @param seed [Integer] + # @param temperature [Float] + # @param top_p [Float] + # @param usage [OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample::Usage] class Input < OpenAI::Internal::Type::BaseModel # @!attribute content @@ -208,15 +172,11 @@ class Input < OpenAI::Internal::Type::BaseModel # @return [String] required :role, String - # @!parse - # # An input message. - # # - # # @param content [String] - # # @param role [String] - # # - # def initialize(content:, role:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:) + # An input message. 
+ # + # @param content [String] + # @param role [String] end class Output < OpenAI::Internal::Type::BaseModel @@ -240,13 +200,9 @@ class Output < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :role - # @!parse - # # @param content [String] - # # @param role [String] - # # - # def initialize(content: nil, role: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content: nil, role: nil) + # @param content [String] + # @param role [String] end # @see OpenAI::Models::Evals::Runs::OutputItemRetrieveResponse::Sample#usage @@ -275,17 +231,13 @@ class Usage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Token usage details for the sample. - # # - # # @param cached_tokens [Integer] - # # @param completion_tokens [Integer] - # # @param prompt_tokens [Integer] - # # @param total_tokens [Integer] - # # - # def initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:, completion_tokens:, prompt_tokens:, total_tokens:) + # Token usage details for the sample. + # + # @param cached_tokens [Integer] + # @param completion_tokens [Integer] + # @param prompt_tokens [Integer] + # @param total_tokens [Integer] end end end diff --git a/lib/openai/models/file_chunking_strategy.rb b/lib/openai/models/file_chunking_strategy.rb index 0c37933e..a7354de0 100644 --- a/lib/openai/models/file_chunking_strategy.rb +++ b/lib/openai/models/file_chunking_strategy.rb @@ -13,9 +13,8 @@ module FileChunkingStrategy # This is returned when the chunking strategy is unknown. Typically, this is because the file was indexed before the `chunking_strategy` concept was introduced in the API. variant :other, -> { OpenAI::Models::OtherFileChunkingStrategyObject } - # @!parse - # # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject)] end end end diff --git a/lib/openai/models/file_chunking_strategy_param.rb b/lib/openai/models/file_chunking_strategy_param.rb index 8a671209..7f5a2487 100644 --- a/lib/openai/models/file_chunking_strategy_param.rb +++ b/lib/openai/models/file_chunking_strategy_param.rb @@ -15,9 +15,8 @@ module FileChunkingStrategyParam # Customize your own chunking strategy by setting chunk size and chunk overlap. 
variant :static, -> { OpenAI::Models::StaticFileChunkingStrategyObjectParam } - # @!parse - # # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam)] end end end diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index 96b979af..5557fffd 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -8,12 +8,8 @@ class FileContentParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index de4e8770..567ed995 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -23,14 +23,10 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::Models::FilePurpose } - # @!parse - # # @param file [Pathname, StringIO] - # # @param purpose [Symbol, OpenAI::Models::FilePurpose] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file:, purpose:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file:, purpose:, request_options: {}) + # @param file [Pathname, StringIO] + # @param purpose [Symbol, OpenAI::Models::FilePurpose] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index fda911ad..177b99cb 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -8,12 +8,8 @@ class FileDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/file_deleted.rb b/lib/openai/models/file_deleted.rb index 67ab7b27..28517280 100644 --- a/lib/openai/models/file_deleted.rb +++ b/lib/openai/models/file_deleted.rb @@ -19,14 +19,10 @@ class FileDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file] required :object, const: :file - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :file] - # # - # def initialize(id:, deleted:, object: :file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :file) + # @param id [String] + # 
@param deleted [Boolean] + # @param object [Symbol, :file] end end end diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index ccc569bf..5f1be612 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -53,16 +53,12 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :purpose - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::FileListParams::Order] - # # @param purpose [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::FileListParams::Order] + # @param purpose [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -72,11 +68,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index d694613d..fd9c63bb 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -70,35 +70,18 @@ class FileObject < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :status_details - # @!parse - # # The `File` object represents a document that has been uploaded to OpenAI. - # # - # # @param id [String] - # # @param bytes [Integer] - # # @param created_at [Integer] - # # @param filename [String] - # # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] - # # @param status [Symbol, OpenAI::Models::FileObject::Status] - # # @param expires_at [Integer] - # # @param status_details [String] - # # @param object [Symbol, :file] - # # - # def initialize( - # id:, - # bytes:, - # created_at:, - # filename:, - # purpose:, - # status:, - # expires_at: nil, - # status_details: nil, - # object: :file, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) + # The `File` object represents a document that has been uploaded to OpenAI. + # + # @param id [String] + # @param bytes [Integer] + # @param created_at [Integer] + # @param filename [String] + # @param purpose [Symbol, OpenAI::Models::FileObject::Purpose] + # @param status [Symbol, OpenAI::Models::FileObject::Status] + # @param expires_at [Integer] + # @param status_details [String] + # @param object [Symbol, :file] # The intended purpose of the file. Supported values are `assistants`, # `assistants_output`, `batch`, `batch_output`, `fine-tune`, `fine-tune-results` @@ -116,11 +99,8 @@ module Purpose FINE_TUNE_RESULTS = :"fine-tune-results" VISION = :vision - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @deprecated @@ -136,11 +116,8 @@ module Status PROCESSED = :processed ERROR = :error - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/file_purpose.rb b/lib/openai/models/file_purpose.rb index c11caef0..0f1ca442 100644 --- a/lib/openai/models/file_purpose.rb +++ b/lib/openai/models/file_purpose.rb @@ -16,11 +16,8 @@ module FilePurpose USER_DATA = :user_data EVALS = :evals - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index ec128c3d..6c8c1a70 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -8,12 +8,8 @@ class FileRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb index b569ae0c..ef958285 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb @@ -16,13 +16,9 @@ class PermissionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array] required :project_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @param project_ids [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(project_ids:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(project_ids:, request_options: {}) + # @param project_ids [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb index 89790c9a..ed9e10fe 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_response.rb @@ -30,18 +30,14 @@ class PermissionCreateResponse < OpenAI::Internal::Type::BaseModel # @return [String] required :project_id, String - # @!parse - # # The `checkpoint.permission` object represents a permission for a fine-tuned - # # model checkpoint. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param project_id [String] - # # @param object [Symbol, :"checkpoint.permission"] - # # - # def initialize(id:, created_at:, project_id:, object: :"checkpoint.permission", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. 
+ # + # @param id [String] + # @param created_at [Integer] + # @param project_id [String] + # @param object [Symbol, :"checkpoint.permission"] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb index 0b049049..7281cf70 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb @@ -10,12 +10,8 @@ class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb index 3a15c6da..1fce739b 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_response.rb @@ -24,14 +24,10 @@ class PermissionDeleteResponse < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"checkpoint.permission"] required :object, const: :"checkpoint.permission" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"checkpoint.permission"] - # # - # def initialize(id:, deleted:, object: :"checkpoint.permission", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"checkpoint.permission") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"checkpoint.permission"] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index e32c1188..29fffbe9 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -50,16 +50,12 @@ class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :project_id - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] - # # @param project_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] + # @param project_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The order in which to retrieve permissions. module Order @@ -68,11 +64,8 @@ module Order ASCENDING = :ascending DESCENDING = :descending - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb index 6f7cadc9..1de51fee 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_response.rb @@ -32,16 +32,12 @@ class PermissionRetrieveResponse < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :last_id, String, nil?: true - # @!parse - # # @param data [Array] - # # @param has_more [Boolean] - # # @param first_id [String, nil] - # # @param last_id [String, nil] - # # @param object [Symbol, :list] - # # - # def initialize(data:, has_more:, first_id: nil, last_id: nil, object: :list, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, has_more:, first_id: nil, last_id: nil, object: :list) + # @param data [Array] + # @param has_more [Boolean] + # @param first_id [String, nil] + # @param last_id [String, nil] + # @param object [Symbol, :list] class Data < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -68,18 +64,14 @@ class Data < OpenAI::Internal::Type::BaseModel # @return [String] required :project_id, String - # @!parse - # # The `checkpoint.permission` object represents a permission for a fine-tuned - # # model checkpoint. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param project_id [String] - # # @param object [Symbol, :"checkpoint.permission"] - # # - # def initialize(id:, created_at:, project_id:, object: :"checkpoint.permission", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, project_id:, object: :"checkpoint.permission") + # The `checkpoint.permission` object represents a permission for a fine-tuned + # model checkpoint. + # + # @param id [String] + # @param created_at [Integer] + # @param project_id [String] + # @param object [Symbol, :"checkpoint.permission"] end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index b92146f5..27a10624 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -142,56 +142,29 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method] # attr_writer :method_ - # @!parse - # # The `fine_tuning.job` object represents a fine-tuning job that has been created - # # through the API. 
- # # - # # @param id [String] - # # @param created_at [Integer] - # # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] - # # @param fine_tuned_model [String, nil] - # # @param finished_at [Integer, nil] - # # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] - # # @param model [String] - # # @param organization_id [String] - # # @param result_files [Array] - # # @param seed [Integer] - # # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] - # # @param trained_tokens [Integer, nil] - # # @param training_file [String] - # # @param validation_file [String, nil] - # # @param estimated_finish [Integer, nil] - # # @param integrations [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] - # # @param object [Symbol, :"fine_tuning.job"] - # # - # def initialize( - # id:, - # created_at:, - # error:, - # fine_tuned_model:, - # finished_at:, - # hyperparameters:, - # model:, - # organization_id:, - # result_files:, - # seed:, - # status:, - # trained_tokens:, - # training_file:, - # validation_file:, - # estimated_finish: nil, - # integrations: nil, - # metadata: nil, - # method_: nil, - # object: :"fine_tuning.job", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") + # The `fine_tuning.job` object represents a fine-tuning job that has been created + # through the API. + # + # @param id [String] + # @param created_at [Integer] + # @param error [OpenAI::Models::FineTuning::FineTuningJob::Error, nil] + # @param fine_tuned_model [String, nil] + # @param finished_at [Integer, nil] + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters] + # @param model [String] + # @param organization_id [String] + # @param result_files [Array] + # @param seed [Integer] + # @param status [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Status] + # @param trained_tokens [Integer, nil] + # @param training_file [String] + # @param validation_file [String, nil] + # @param estimated_finish [Integer, nil] + # @param integrations [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param method_ [OpenAI::Models::FineTuning::FineTuningJob::Method] + # @param object [Symbol, :"fine_tuning.job"] # @see OpenAI::Models::FineTuning::FineTuningJob#error class Error < OpenAI::Internal::Type::BaseModel @@ -214,17 +187,13 @@ class Error < OpenAI::Internal::Type::BaseModel # @return [String, nil] required :param, String, nil?: true - # @!parse - # # For fine-tuning jobs that have `failed`, this will contain more information on - # # the cause of the failure. - # # - # # @param code [String] - # # @param message [String] - # # @param param [String, nil] - # # - # def initialize(code:, message:, param:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:, param:) + # For fine-tuning jobs that have `failed`, this will contain more information on + # the cause of the failure. 
+ # + # @param code [String] + # @param message [String] + # @param param [String, nil] end # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters @@ -263,17 +232,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. This value will only be - # # returned when running `supervised` jobs. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. This value will only be + # returned when running `supervised` jobs. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -286,9 +251,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -302,9 +266,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -318,9 +281,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end @@ -338,11 +300,8 @@ module Status FAILED = :failed CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::FineTuning::FineTuningJob#method_ @@ -377,16 +336,12 @@ class Method < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] # attr_writer :type - # @!parse - # # The method used for fine-tuning. - # # - # # @param dpo [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] - # # @param supervised [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] - # # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] - # # - # def initialize(dpo: nil, supervised: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(dpo: nil, supervised: nil, type: nil) + # The method used for fine-tuning. 
+ # + # @param dpo [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] + # @param supervised [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel @@ -400,14 +355,10 @@ class Dpo < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the DPO fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the DPO fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -459,17 +410,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param beta [Symbol, :auto, Float] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param beta [Symbol, :auto, Float] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -482,9 +429,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # The beta value for the DPO method. A higher beta value will increase the weight @@ -498,9 +444,8 @@ module Beta variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -514,9 +459,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -530,9 +474,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -550,14 +493,10 @@ class Supervised < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the supervised fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the supervised fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -597,16 +536,12 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -619,9 +554,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -635,9 +569,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -651,9 +584,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -667,11 +599,8 @@ module Type SUPERVISED = :supervised DPO = :dpo - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index a354bb5a..f647416e 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -55,20 +55,16 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] # attr_writer :type - # @!parse - # # Fine-tuning job event object - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] - # # @param message [String] - # # @param data [Object] - # # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] - # # @param object [Symbol, :"fine_tuning.job.event"] - # # - # def initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") + # Fine-tuning job event object + # + # @param id [String] + # @param created_at [Integer] + # @param level [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Level] + # @param message [String] + # @param data [Object] + # @param type [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] + # @param object [Symbol, :"fine_tuning.job.event"] # The log level of the event. # @@ -80,11 +76,8 @@ module Level WARN = :warn ERROR = :error - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of event. @@ -96,11 +89,8 @@ module Type MESSAGE = :message METRICS = :metrics - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index 603de792..b1a0ba4f 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -37,20 +37,16 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tags - # @!parse - # # The settings for your integration with Weights and Biases. This payload - # # specifies the project that metrics will be sent to. Optionally, you can set an - # # explicit display name for your run, add tags to your run, and set a default - # # entity (team, username, etc) to be associated with your run. - # # - # # @param project [String] - # # @param entity [String, nil] - # # @param name [String, nil] - # # @param tags [Array] - # # - # def initialize(project:, entity: nil, name: nil, tags: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(project:, entity: nil, name: nil, tags: nil) + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. 
+ # + # @param project [String] + # @param entity [String, nil] + # @param name [String, nil] + # @param tags [Array] end end diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb index 3554c944..8d9da11d 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration_object.rb @@ -19,13 +19,9 @@ class FineTuningJobWandbIntegrationObject < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] required :wandb, -> { OpenAI::Models::FineTuning::FineTuningJobWandbIntegration } - # @!parse - # # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] - # # @param type [Symbol, :wandb] - # # - # def initialize(wandb:, type: :wandb, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(wandb:, type: :wandb) + # @param wandb [OpenAI::Models::FineTuning::FineTuningJobWandbIntegration] + # @param type [Symbol, :wandb] end end diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index 92eda537..129f8e75 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -9,12 +9,8 @@ class JobCancelParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index df3de345..e27eb79c 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -113,35 +113,17 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :validation_file, String, nil?: true - # @!parse - # # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] - # # @param training_file [String] - # # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] - # # @param integrations [Array, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] - # # @param seed [Integer, nil] - # # @param suffix [String, nil] - # # @param validation_file [String, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # model:, - # training_file:, - # hyperparameters: nil, - # integrations: nil, - # metadata: nil, - # method_: nil, - # seed: nil, - # suffix: nil, - # validation_file: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(model:, training_file:, hyperparameters: nil, integrations: nil, metadata: nil, method_: nil, seed: nil, suffix: nil, validation_file: nil, request_options: {}) + # @param model [String, Symbol, OpenAI::Models::FineTuning::JobCreateParams::Model] + # 
@param training_file [String] + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] + # @param integrations [Array, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param method_ [OpenAI::Models::FineTuning::JobCreateParams::Method] + # @param seed [Integer, nil] + # @param suffix [String, nil] + # @param validation_file [String, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The name of the model to fine-tune. You can select one of the # [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned). @@ -158,9 +140,8 @@ module Model variant const: -> { OpenAI::Models::FineTuning::JobCreateParams::Model::GPT_4O_MINI } - # @!parse - # # @return [Array(String, Symbol)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol)] # @!group @@ -209,17 +190,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. This value is now deprecated - # # in favor of `method`, and should be passed in under the `method` parameter. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. This value is now deprecated + # in favor of `method`, and should be passed in under the `method` parameter. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -232,9 +209,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -248,9 +224,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -264,9 +239,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end @@ -287,13 +261,9 @@ class Integration < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] required :wandb, -> { OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb } - # @!parse - # # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] - # # @param type [Symbol, :wandb] - # # - # def initialize(wandb:, type: :wandb, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(wandb:, type: :wandb) + # @param wandb [OpenAI::Models::FineTuning::JobCreateParams::Integration::Wandb] + # @param type [Symbol, :wandb] # @see OpenAI::Models::FineTuning::JobCreateParams::Integration#wandb class Wandb < OpenAI::Internal::Type::BaseModel @@ -330,20 +300,16 @@ class Wandb < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :tags - # @!parse - # # The settings for your integration with Weights and Biases. This payload - # # specifies the project that metrics will be sent to. Optionally, you can set an - # # explicit display name for your run, add tags to your run, and set a default - # # entity (team, username, etc) to be associated with your run. - # # - # # @param project [String] - # # @param entity [String, nil] - # # @param name [String, nil] - # # @param tags [Array] - # # - # def initialize(project:, entity: nil, name: nil, tags: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(project:, entity: nil, name: nil, tags: nil) + # The settings for your integration with Weights and Biases. This payload + # specifies the project that metrics will be sent to. Optionally, you can set an + # explicit display name for your run, add tags to your run, and set a default + # entity (team, username, etc) to be associated with your run. + # + # @param project [String] + # @param entity [String, nil] + # @param name [String, nil] + # @param tags [Array] end end @@ -378,16 +344,12 @@ class Method < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] # attr_writer :type - # @!parse - # # The method used for fine-tuning. - # # - # # @param dpo [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] - # # @param supervised [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] - # # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] - # # - # def initialize(dpo: nil, supervised: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(dpo: nil, supervised: nil, type: nil) + # The method used for fine-tuning. 
+ # + # @param dpo [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] + # @param supervised [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] + # @param type [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel @@ -402,14 +364,10 @@ class Dpo < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the DPO fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the DPO fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -461,17 +419,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param beta [Symbol, :auto, Float] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param beta [Symbol, :auto, Float] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -484,9 +438,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # The beta value for the DPO method. A higher beta value will increase the weight @@ -500,9 +453,8 @@ module Beta variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -516,9 +468,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. 
An epoch refers to one full cycle @@ -532,9 +483,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -552,14 +502,10 @@ class Supervised < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] # attr_writer :hyperparameters - # @!parse - # # Configuration for the supervised fine-tuning method. - # # - # # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] - # # - # def initialize(hyperparameters: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(hyperparameters: nil) + # Configuration for the supervised fine-tuning method. + # + # @param hyperparameters [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel @@ -599,16 +545,12 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel # # @return [Symbol, :auto, Integer] # attr_writer :n_epochs - # @!parse - # # The hyperparameters used for the fine-tuning job. - # # - # # @param batch_size [Symbol, :auto, Integer] - # # @param learning_rate_multiplier [Symbol, :auto, Float] - # # @param n_epochs [Symbol, :auto, Integer] - # # - # def initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) + # The hyperparameters used for the fine-tuning job. + # + # @param batch_size [Symbol, :auto, Integer] + # @param learning_rate_multiplier [Symbol, :auto, Float] + # @param n_epochs [Symbol, :auto, Integer] # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. @@ -621,9 +563,8 @@ module BatchSize variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end # Scaling factor for the learning rate. A smaller learning rate may be useful to @@ -637,9 +578,8 @@ module LearningRateMultiplier variant Float - # @!parse - # # @return [Array(Symbol, :auto, Float)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Float)] end # The number of epochs to train the model for. An epoch refers to one full cycle @@ -653,9 +593,8 @@ module NEpochs variant Integer - # @!parse - # # @return [Array(Symbol, :auto, Integer)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, :auto, Integer)] end end end @@ -669,11 +608,8 @@ module Type SUPERVISED = :supervised DPO = :dpo - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index d69f7e6e..d4729ee6 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -29,14 +29,10 @@ class JobListEventsParams < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :limit - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index 1ca2c3a3..e2ed96ce 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -36,15 +36,11 @@ class JobListParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, metadata: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, metadata: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param metadata [Hash{Symbol=>String}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index 3fa511ac..b1579373 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -9,12 +9,8 @@ class JobRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index fc91cb4e..f4f1bea1 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -30,14 +30,10 @@ class CheckpointListParams < OpenAI::Internal::Type::BaseModel # # @return [Integer] # attr_writer :limit - # @!parse - # # @param after [String] - # # @param limit [Integer] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, limit: nil, 
request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, limit: nil, request_options: {}) + # @param after [String] + # @param limit [Integer] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index e32a2926..98dbd856 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -48,32 +48,17 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @return [Integer] required :step_number, Integer - # @!parse - # # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a - # # fine-tuning job that is ready to use. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param fine_tuned_model_checkpoint [String] - # # @param fine_tuning_job_id [String] - # # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] - # # @param step_number [Integer] - # # @param object [Symbol, :"fine_tuning.job.checkpoint"] - # # - # def initialize( - # id:, - # created_at:, - # fine_tuned_model_checkpoint:, - # fine_tuning_job_id:, - # metrics:, - # step_number:, - # object: :"fine_tuning.job.checkpoint", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, fine_tuned_model_checkpoint:, fine_tuning_job_id:, metrics:, step_number:, object: :"fine_tuning.job.checkpoint") + # The `fine_tuning.job.checkpoint` object represents a model checkpoint for a + # fine-tuning job that is ready to use. + # + # @param id [String] + # @param created_at [Integer] + # @param fine_tuned_model_checkpoint [String] + # @param fine_tuning_job_id [String] + # @param metrics [OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint::Metrics] + # @param step_number [Integer] + # @param object [Symbol, :"fine_tuning.job.checkpoint"] # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel @@ -140,31 +125,16 @@ class Metrics < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :valid_mean_token_accuracy - # @!parse - # # Metrics at the step number during the fine-tuning job. - # # - # # @param full_valid_loss [Float] - # # @param full_valid_mean_token_accuracy [Float] - # # @param step [Float] - # # @param train_loss [Float] - # # @param train_mean_token_accuracy [Float] - # # @param valid_loss [Float] - # # @param valid_mean_token_accuracy [Float] - # # - # def initialize( - # full_valid_loss: nil, - # full_valid_mean_token_accuracy: nil, - # step: nil, - # train_loss: nil, - # train_mean_token_accuracy: nil, - # valid_loss: nil, - # valid_mean_token_accuracy: nil, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(full_valid_loss: nil, full_valid_mean_token_accuracy: nil, step: nil, train_loss: nil, train_mean_token_accuracy: nil, valid_loss: nil, valid_mean_token_accuracy: nil) + # Metrics at the step number during the fine-tuning job. 
+            #
+            #   @param full_valid_loss [Float]
+            #   @param full_valid_mean_token_accuracy [Float]
+            #   @param step [Float]
+            #   @param train_loss [Float]
+            #   @param train_mean_token_accuracy [Float]
+            #   @param valid_loss [Float]
+            #   @param valid_mean_token_accuracy [Float]
           end
         end
       end
diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb
index 7bd7f7d5..a37c41bc 100644
--- a/lib/openai/models/function_definition.rb
+++ b/lib/openai/models/function_definition.rb
@@ -47,15 +47,11 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel
       #   @return [Boolean, nil]
       optional :strict, OpenAI::Internal::Type::Boolean, nil?: true
 
-      # @!parse
-      #   # @param name [String]
-      #   # @param description [String]
-      #   # @param parameters [Hash{Symbol=>Object}]
-      #   # @param strict [Boolean, nil]
-      #   #
-      #   def initialize(name:, description: nil, parameters: nil, strict: nil, **) = super
-
-      # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+      # @!method initialize(name:, description: nil, parameters: nil, strict: nil)
+      #   @param name [String]
+      #   @param description [String]
+      #   @param parameters [Hash{Symbol=>Object}]
+      #   @param strict [Boolean, nil]
     end
   end
 end
diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb
index dddf163d..7d54b273 100644
--- a/lib/openai/models/image.rb
+++ b/lib/openai/models/image.rb
@@ -35,16 +35,12 @@ class Image < OpenAI::Internal::Type::BaseModel
       #   # @return [String]
       #   attr_writer :url
 
-      # @!parse
-      #   # Represents the url or the content of an image generated by the OpenAI API.
-      #   #
-      #   # @param b64_json [String]
-      #   # @param revised_prompt [String]
-      #   # @param url [String]
-      #   #
-      #   def initialize(b64_json: nil, revised_prompt: nil, url: nil, **) = super
-
-      # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+      # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil)
+      #   Represents the url or the content of an image generated by the OpenAI API.
+ # + # @param b64_json [String] + # @param revised_prompt [String] + # @param url [String] end end end diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index b51d4723..d2e09d8f 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -58,18 +58,14 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param image [Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # # @param n [Integer, nil] - # # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] - # # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @param image [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param n [Integer, nil] + # @param response_format [Symbol, OpenAI::Models::ImageCreateVariationParams::ResponseFormat, nil] + # @param size [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The model to use for image generation. Only `dall-e-2` is supported at this # time. @@ -81,9 +77,8 @@ module Model # The model to use for image generation. Only `dall-e-2` is supported at this time. variant enum: -> { OpenAI::Models::ImageModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The format in which the generated images are returned. Must be one of `url` or @@ -95,11 +90,8 @@ module ResponseFormat URL = :url B64_JSON = :b64_json - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -111,11 +103,8 @@ module Size SIZE_512X512 = :"512x512" SIZE_1024X1024 = :"1024x1024" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index c2487b6f..247e370b 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -74,33 +74,16 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param image [Pathname, StringIO] - # # @param prompt [String] - # # @param mask [Pathname, StringIO] - # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # # @param n [Integer, nil] - # # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] - # # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # image:, - # prompt:, - # mask: nil, - # model: nil, - # n: nil, - # response_format: nil, - # size: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) + # @param image [Pathname, StringIO] + # @param prompt [String] + # @param mask [Pathname, StringIO] + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param n [Integer, nil] + # @param response_format [Symbol, OpenAI::Models::ImageEditParams::ResponseFormat, nil] + # @param size [Symbol, OpenAI::Models::ImageEditParams::Size, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The model to use for image generation. Only `dall-e-2` is supported at this # time. @@ -112,9 +95,8 @@ module Model # The model to use for image generation. Only `dall-e-2` is supported at this time. variant enum: -> { OpenAI::Models::ImageModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The format in which the generated images are returned. Must be one of `url` or @@ -126,11 +108,8 @@ module ResponseFormat URL = :url B64_JSON = :b64_json - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -142,11 +121,8 @@ module Size SIZE_512X512 = :"512x512" SIZE_1024X1024 = :"1024x1024" - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index bce729c4..43b701b6 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -77,33 +77,16 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param prompt [String] - # # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] - # # @param n [Integer, nil] - # # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality] - # # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] - # # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] - # # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # prompt:, - # model: nil, - # n: nil, - # quality: nil, - # response_format: nil, - # size: nil, - # style: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(prompt:, model: nil, n: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) + # @param prompt [String] + # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] + # @param n [Integer, nil] + # @param quality [Symbol, OpenAI::Models::ImageGenerateParams::Quality] + # @param response_format [Symbol, OpenAI::Models::ImageGenerateParams::ResponseFormat, nil] + # @param size [Symbol, OpenAI::Models::ImageGenerateParams::Size, nil] + # @param style [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The model to use for image generation. module Model @@ -114,9 +97,8 @@ module Model # The model to use for image generation. variant enum: -> { OpenAI::Models::ImageModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ImageModel)] end # The quality of the image that will be generated. `hd` creates images with finer @@ -128,11 +110,8 @@ module Quality STANDARD = :standard HD = :hd - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The format in which the generated images are returned. Must be one of `url` or @@ -144,11 +123,8 @@ module ResponseFormat URL = :url B64_JSON = :b64_json - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The size of the generated images. Must be one of `256x256`, `512x512`, or @@ -163,11 +139,8 @@ module Size SIZE_1792X1024 = :"1792x1024" SIZE_1024X1792 = :"1024x1792" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The style of the generated images. Must be one of `vivid` or `natural`. Vivid @@ -180,11 +153,8 @@ module Style VIVID = :vivid NATURAL = :natural - finalize! 
-
-        # @!parse
-        #   # @return [Array]
-        #   def self.values; end
+        # @!method self.values
+        #   @return [Array]
       end
     end
   end
diff --git a/lib/openai/models/image_model.rb b/lib/openai/models/image_model.rb
index 83fd2f56..4b6ca64c 100644
--- a/lib/openai/models/image_model.rb
+++ b/lib/openai/models/image_model.rb
@@ -8,11 +8,8 @@ module ImageModel
       DALL_E_2 = :"dall-e-2"
       DALL_E_3 = :"dall-e-3"
 
-      finalize!
-
-      # @!parse
-      #   # @return [Array]
-      #   def self.values; end
+      # @!method self.values
+      #   @return [Array]
     end
   end
 end
diff --git a/lib/openai/models/images_response.rb b/lib/openai/models/images_response.rb
index 108b9120..8816ee07 100644
--- a/lib/openai/models/images_response.rb
+++ b/lib/openai/models/images_response.rb
@@ -14,13 +14,9 @@ class ImagesResponse < OpenAI::Internal::Type::BaseModel
       #   @return [Array]
       required :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Image] }
 
-      # @!parse
-      #   # @param created [Integer]
-      #   # @param data [Array]
-      #   #
-      #   def initialize(created:, data:, **) = super
-
-      # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+      # @!method initialize(created:, data:)
+      #   @param created [Integer]
+      #   @param data [Array]
     end
   end
 end
diff --git a/lib/openai/models/model.rb b/lib/openai/models/model.rb
index a3d362fb..e5893b7c 100644
--- a/lib/openai/models/model.rb
+++ b/lib/openai/models/model.rb
@@ -28,17 +28,13 @@ class Model < OpenAI::Internal::Type::BaseModel
       #   @return [String]
       required :owned_by, String
 
-      # @!parse
-      #   # Describes an OpenAI model offering that can be used with the API.
-      #   #
-      #   # @param id [String]
-      #   # @param created [Integer]
-      #   # @param owned_by [String]
-      #   # @param object [Symbol, :model]
-      #   #
-      #   def initialize(id:, created:, owned_by:, object: :model, **) = super
-
-      # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void
+      # @!method initialize(id:, created:, owned_by:, object: :model)
+      #   Describes an OpenAI model offering that can be used with the API.
+ # + # @param id [String] + # @param created [Integer] + # @param owned_by [String] + # @param object [Symbol, :model] end end end diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index 3f4036d0..f288614b 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -8,12 +8,8 @@ class ModelDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/model_deleted.rb b/lib/openai/models/model_deleted.rb index 82476c3e..612bc76c 100644 --- a/lib/openai/models/model_deleted.rb +++ b/lib/openai/models/model_deleted.rb @@ -19,14 +19,10 @@ class ModelDeleted < OpenAI::Internal::Type::BaseModel # @return [String] required :object, String - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [String] - # # - # def initialize(id:, deleted:, object:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object:) + # @param id [String] + # @param deleted [Boolean] + # @param object [String] end end end diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index c7c05067..52c1d783 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -8,12 +8,8 @@ class ModelListParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index b1384fc8..c2d43bc2 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -8,12 +8,8 @@ class ModelRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/moderation.rb b/lib/openai/models/moderation.rb index 6abb069c..834ced17 100644 --- a/lib/openai/models/moderation.rb +++ b/lib/openai/models/moderation.rb @@ -27,15 +27,11 @@ class Moderation < OpenAI::Internal::Type::BaseModel # @return [Boolean] required :flagged, OpenAI::Internal::Type::Boolean - # @!parse - # # @param categories [OpenAI::Models::Moderation::Categories] - # # @param 
category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] - # # @param category_scores [OpenAI::Models::Moderation::CategoryScores] - # # @param flagged [Boolean] - # # - # def initialize(categories:, category_applied_input_types:, category_scores:, flagged:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(categories:, category_applied_input_types:, category_scores:, flagged:) + # @param categories [OpenAI::Models::Moderation::Categories] + # @param category_applied_input_types [OpenAI::Models::Moderation::CategoryAppliedInputTypes] + # @param category_scores [OpenAI::Models::Moderation::CategoryScores] + # @param flagged [Boolean] # @see OpenAI::Models::Moderation#categories class Categories < OpenAI::Internal::Type::BaseModel @@ -134,43 +130,22 @@ class Categories < OpenAI::Internal::Type::BaseModel # @return [Boolean] required :violence_graphic, OpenAI::Internal::Type::Boolean, api_name: :"violence/graphic" - # @!parse - # # A list of the categories, and whether they are flagged or not. - # # - # # @param harassment [Boolean] - # # @param harassment_threatening [Boolean] - # # @param hate [Boolean] - # # @param hate_threatening [Boolean] - # # @param illicit [Boolean, nil] - # # @param illicit_violent [Boolean, nil] - # # @param self_harm [Boolean] - # # @param self_harm_instructions [Boolean] - # # @param self_harm_intent [Boolean] - # # @param sexual [Boolean] - # # @param sexual_minors [Boolean] - # # @param violence [Boolean] - # # @param violence_graphic [Boolean] - # # - # def initialize( - # harassment:, - # harassment_threatening:, - # hate:, - # hate_threatening:, - # illicit:, - # illicit_violent:, - # self_harm:, - # self_harm_instructions:, - # self_harm_intent:, - # sexual:, - # sexual_minors:, - # violence:, - # violence_graphic:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) + # A list of the categories, and whether they are flagged or not. + # + # @param harassment [Boolean] + # @param harassment_threatening [Boolean] + # @param hate [Boolean] + # @param hate_threatening [Boolean] + # @param illicit [Boolean, nil] + # @param illicit_violent [Boolean, nil] + # @param self_harm [Boolean] + # @param self_harm_instructions [Boolean] + # @param self_harm_intent [Boolean] + # @param sexual [Boolean] + # @param sexual_minors [Boolean] + # @param violence [Boolean] + # @param violence_graphic [Boolean] end # @see OpenAI::Models::Moderation#category_applied_input_types @@ -274,54 +249,30 @@ class CategoryAppliedInputTypes < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Moderation::CategoryAppliedInputTypes::ViolenceGraphic] }, api_name: :"violence/graphic" - # @!parse - # # A list of the categories along with the input type(s) that the score applies to. 
- # # - # # @param harassment [Array] - # # @param harassment_threatening [Array] - # # @param hate [Array] - # # @param hate_threatening [Array] - # # @param illicit [Array] - # # @param illicit_violent [Array] - # # @param self_harm [Array] - # # @param self_harm_instructions [Array] - # # @param self_harm_intent [Array] - # # @param sexual [Array] - # # @param sexual_minors [Array] - # # @param violence [Array] - # # @param violence_graphic [Array] - # # - # def initialize( - # harassment:, - # harassment_threatening:, - # hate:, - # hate_threatening:, - # illicit:, - # illicit_violent:, - # self_harm:, - # self_harm_instructions:, - # self_harm_intent:, - # sexual:, - # sexual_minors:, - # violence:, - # violence_graphic:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) + # A list of the categories along with the input type(s) that the score applies to. + # + # @param harassment [Array] + # @param harassment_threatening [Array] + # @param hate [Array] + # @param hate_threatening [Array] + # @param illicit [Array] + # @param illicit_violent [Array] + # @param self_harm [Array] + # @param self_harm_instructions [Array] + # @param self_harm_intent [Array] + # @param sexual [Array] + # @param sexual_minors [Array] + # @param violence [Array] + # @param violence_graphic [Array] module Harassment extend OpenAI::Internal::Type::Enum TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module HarassmentThreatening @@ -329,11 +280,8 @@ module HarassmentThreatening TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Hate @@ -341,11 +289,8 @@ module Hate TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module HateThreatening @@ -353,11 +298,8 @@ module HateThreatening TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Illicit @@ -365,11 +307,8 @@ module Illicit TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module IllicitViolent @@ -377,11 +316,8 @@ module IllicitViolent TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SelfHarm @@ -390,11 +326,8 @@ module SelfHarm TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SelfHarmInstruction @@ -403,11 +336,8 @@ module SelfHarmInstruction TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SelfHarmIntent @@ -416,11 +346,8 @@ module SelfHarmIntent TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Sexual @@ -429,11 +356,8 @@ module Sexual TEXT = :text IMAGE = :image - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module SexualMinor @@ -441,11 +365,8 @@ module SexualMinor TEXT = :text - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Violence @@ -454,11 +375,8 @@ module Violence TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module ViolenceGraphic @@ -467,11 +385,8 @@ module ViolenceGraphic TEXT = :text IMAGE = :image - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -555,43 +470,22 @@ class CategoryScores < OpenAI::Internal::Type::BaseModel # @return [Float] required :violence_graphic, Float, api_name: :"violence/graphic" - # @!parse - # # A list of the categories along with their scores as predicted by model. - # # - # # @param harassment [Float] - # # @param harassment_threatening [Float] - # # @param hate [Float] - # # @param hate_threatening [Float] - # # @param illicit [Float] - # # @param illicit_violent [Float] - # # @param self_harm [Float] - # # @param self_harm_instructions [Float] - # # @param self_harm_intent [Float] - # # @param sexual [Float] - # # @param sexual_minors [Float] - # # @param violence [Float] - # # @param violence_graphic [Float] - # # - # def initialize( - # harassment:, - # harassment_threatening:, - # hate:, - # hate_threatening:, - # illicit:, - # illicit_violent:, - # self_harm:, - # self_harm_instructions:, - # self_harm_intent:, - # sexual:, - # sexual_minors:, - # violence:, - # violence_graphic:, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(harassment:, harassment_threatening:, hate:, hate_threatening:, illicit:, illicit_violent:, self_harm:, self_harm_instructions:, self_harm_intent:, sexual:, sexual_minors:, violence:, violence_graphic:) + # A list of the categories along with their scores as predicted by model. 
+ # + # @param harassment [Float] + # @param harassment_threatening [Float] + # @param hate [Float] + # @param hate_threatening [Float] + # @param illicit [Float] + # @param illicit_violent [Float] + # @param self_harm [Float] + # @param self_harm_instructions [Float] + # @param self_harm_intent [Float] + # @param sexual [Float] + # @param sexual_minors [Float] + # @param violence [Float] + # @param violence_graphic [Float] end end end diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 41230634..6bc57485 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -28,14 +28,10 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String, Symbol, OpenAI::Models::ModerationModel] # attr_writer :model - # @!parse - # # @param input [String, Array, Array] - # # @param model [String, Symbol, OpenAI::Models::ModerationModel] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(input:, model: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model: nil, request_options: {}) + # @param input [String, Array, Array] + # @param model [String, Symbol, OpenAI::Models::ModerationModel] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Input (or inputs) to classify. Can be a single string, an array of strings, or # an array of multi-modal input objects similar to other models. @@ -51,9 +47,8 @@ module Input # An array of multi-modal inputs to the moderation model. variant -> { OpenAI::Models::ModerationCreateParams::Input::ModerationMultiModalInputArray } - # @!parse - # # @return [Array(String, Array, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] @@ -75,9 +70,8 @@ module Model # available models [here](https://platform.openai.com/docs/models#moderation). variant enum: -> { OpenAI::Models::ModerationModel } - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ModerationModel)] end end end diff --git a/lib/openai/models/moderation_create_response.rb b/lib/openai/models/moderation_create_response.rb index 922bca1e..45575319 100644 --- a/lib/openai/models/moderation_create_response.rb +++ b/lib/openai/models/moderation_create_response.rb @@ -22,16 +22,12 @@ class ModerationCreateResponse < OpenAI::Internal::Type::BaseModel # @return [Array] required :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Moderation] } - # @!parse - # # Represents if a given text input is potentially harmful. - # # - # # @param id [String] - # # @param model [String] - # # @param results [Array] - # # - # def initialize(id:, model:, results:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, model:, results:) + # Represents if a given text input is potentially harmful. 
+ # + # @param id [String] + # @param model [String] + # @param results [Array] end end end diff --git a/lib/openai/models/moderation_image_url_input.rb b/lib/openai/models/moderation_image_url_input.rb index 25b0835a..f9fbd274 100644 --- a/lib/openai/models/moderation_image_url_input.rb +++ b/lib/openai/models/moderation_image_url_input.rb @@ -15,15 +15,11 @@ class ModerationImageURLInput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!parse - # # An object describing an image to classify. - # # - # # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] - # # @param type [Symbol, :image_url] - # # - # def initialize(image_url:, type: :image_url, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(image_url:, type: :image_url) + # An object describing an image to classify. + # + # @param image_url [OpenAI::Models::ModerationImageURLInput::ImageURL] + # @param type [Symbol, :image_url] # @see OpenAI::Models::ModerationImageURLInput#image_url class ImageURL < OpenAI::Internal::Type::BaseModel @@ -33,14 +29,10 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # Contains either an image URL or a data URL for a base64 encoded image. - # # - # # @param url [String] - # # - # def initialize(url:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(url:) + # Contains either an image URL or a data URL for a base64 encoded image. + # + # @param url [String] end end end diff --git a/lib/openai/models/moderation_model.rb b/lib/openai/models/moderation_model.rb index fa606daa..02d78035 100644 --- a/lib/openai/models/moderation_model.rb +++ b/lib/openai/models/moderation_model.rb @@ -10,11 +10,8 @@ module ModerationModel TEXT_MODERATION_LATEST = :"text-moderation-latest" TEXT_MODERATION_STABLE = :"text-moderation-stable" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/moderation_multi_modal_input.rb b/lib/openai/models/moderation_multi_modal_input.rb index ca2979e7..32f5923b 100644 --- a/lib/openai/models/moderation_multi_modal_input.rb +++ b/lib/openai/models/moderation_multi_modal_input.rb @@ -14,9 +14,8 @@ module ModerationMultiModalInput # An object describing text to classify. variant :text, -> { OpenAI::Models::ModerationTextInput } - # @!parse - # # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ModerationImageURLInput, OpenAI::Models::ModerationTextInput)] end end end diff --git a/lib/openai/models/moderation_text_input.rb b/lib/openai/models/moderation_text_input.rb index ec5fe7cb..2feaf23d 100644 --- a/lib/openai/models/moderation_text_input.rb +++ b/lib/openai/models/moderation_text_input.rb @@ -15,15 +15,11 @@ class ModerationTextInput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # An object describing text to classify. - # # - # # @param text [String] - # # @param type [Symbol, :text] - # # - # def initialize(text:, type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :text) + # An object describing text to classify. 
+ # + # @param text [String] + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/other_file_chunking_strategy_object.rb b/lib/openai/models/other_file_chunking_strategy_object.rb index acb54ef8..862ae35e 100644 --- a/lib/openai/models/other_file_chunking_strategy_object.rb +++ b/lib/openai/models/other_file_chunking_strategy_object.rb @@ -9,16 +9,12 @@ class OtherFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :other] required :type, const: :other - # @!parse - # # This is returned when the chunking strategy is unknown. Typically, this is - # # because the file was indexed before the `chunking_strategy` concept was - # # introduced in the API. - # # - # # @param type [Symbol, :other] - # # - # def initialize(type: :other, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :other) + # This is returned when the chunking strategy is unknown. Typically, this is + # because the file was indexed before the `chunking_strategy` concept was + # introduced in the API. + # + # @param type [Symbol, :other] end end end diff --git a/lib/openai/models/reasoning.rb b/lib/openai/models/reasoning.rb index a5fa2bc4..5aa982ea 100644 --- a/lib/openai/models/reasoning.rb +++ b/lib/openai/models/reasoning.rb @@ -32,19 +32,15 @@ class Reasoning < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Reasoning::Summary, nil] optional :summary, enum: -> { OpenAI::Models::Reasoning::Summary }, nil?: true - # @!parse - # # **o-series models only** - # # - # # Configuration options for - # # [reasoning models](https://platform.openai.com/docs/guides/reasoning). - # # - # # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] - # # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] - # # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] - # # - # def initialize(effort: nil, generate_summary: nil, summary: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(effort: nil, generate_summary: nil, summary: nil) + # **o-series models only** + # + # Configuration options for + # [reasoning models](https://platform.openai.com/docs/guides/reasoning). + # + # @param effort [Symbol, OpenAI::Models::ReasoningEffort, nil] + # @param generate_summary [Symbol, OpenAI::Models::Reasoning::GenerateSummary, nil] + # @param summary [Symbol, OpenAI::Models::Reasoning::Summary, nil] # @deprecated # @@ -62,11 +58,8 @@ module GenerateSummary CONCISE = :concise DETAILED = :detailed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # A summary of the reasoning performed by the model. This can be useful for @@ -81,11 +74,8 @@ module Summary CONCISE = :concise DETAILED = :detailed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/reasoning_effort.rb b/lib/openai/models/reasoning_effort.rb index a7bb035e..f9990508 100644 --- a/lib/openai/models/reasoning_effort.rb +++ b/lib/openai/models/reasoning_effort.rb @@ -15,11 +15,8 @@ module ReasoningEffort MEDIUM = :medium HIGH = :high - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/response_format_json_object.rb b/lib/openai/models/response_format_json_object.rb index 611237a3..52fc23ae 100644 --- a/lib/openai/models/response_format_json_object.rb +++ b/lib/openai/models/response_format_json_object.rb @@ -9,16 +9,12 @@ class ResponseFormatJSONObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :json_object] required :type, const: :json_object - # @!parse - # # JSON object response format. An older method of generating JSON responses. Using - # # `json_schema` is recommended for models that support it. Note that the model - # # will not generate JSON without a system or user message instructing it to do so. - # # - # # @param type [Symbol, :json_object] - # # - # def initialize(type: :json_object, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :json_object) + # JSON object response format. An older method of generating JSON responses. Using + # `json_schema` is recommended for models that support it. Note that the model + # will not generate JSON without a system or user message instructing it to do so. + # + # @param type [Symbol, :json_object] end end end diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 56f2e3b1..0e9e47fb 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -15,17 +15,13 @@ class ResponseFormatJSONSchema < OpenAI::Internal::Type::BaseModel # @return [Symbol, :json_schema] required :type, const: :json_schema - # @!parse - # # JSON Schema response format. Used to generate structured JSON responses. Learn - # # more about - # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). - # # - # # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] - # # @param type [Symbol, :json_schema] - # # - # def initialize(json_schema:, type: :json_schema, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(json_schema:, type: :json_schema) + # JSON Schema response format. Used to generate structured JSON responses. Learn + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). + # + # @param json_schema [OpenAI::Models::ResponseFormatJSONSchema::JSONSchema] + # @param type [Symbol, :json_schema] # @see OpenAI::Models::ResponseFormatJSONSchema#json_schema class JSONSchema < OpenAI::Internal::Type::BaseModel @@ -68,17 +64,13 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true - # @!parse - # # Structured Outputs configuration options, including a JSON Schema. - # # - # # @param name [String] - # # @param description [String] - # # @param schema [Hash{Symbol=>Object}] - # # @param strict [Boolean, nil] - # # - # def initialize(name:, description: nil, schema: nil, strict: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, description: nil, schema: nil, strict: nil) + # Structured Outputs configuration options, including a JSON Schema. 
+ # + # @param name [String] + # @param description [String] + # @param schema [Hash{Symbol=>Object}] + # @param strict [Boolean, nil] end end end diff --git a/lib/openai/models/response_format_text.rb b/lib/openai/models/response_format_text.rb index 609679d6..8101bcca 100644 --- a/lib/openai/models/response_format_text.rb +++ b/lib/openai/models/response_format_text.rb @@ -9,14 +9,10 @@ class ResponseFormatText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!parse - # # Default response format. Used to generate text responses. - # # - # # @param type [Symbol, :text] - # # - # def initialize(type: :text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :text) + # Default response format. Used to generate text responses. + # + # @param type [Symbol, :text] end end end diff --git a/lib/openai/models/responses/computer_tool.rb b/lib/openai/models/responses/computer_tool.rb index 3af1eabe..41a7499d 100644 --- a/lib/openai/models/responses/computer_tool.rb +++ b/lib/openai/models/responses/computer_tool.rb @@ -28,18 +28,14 @@ class ComputerTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :computer_use_preview] required :type, const: :computer_use_preview - # @!parse - # # A tool that controls a virtual computer. Learn more about the - # # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). - # # - # # @param display_height [Float] - # # @param display_width [Float] - # # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] - # # @param type [Symbol, :computer_use_preview] - # # - # def initialize(display_height:, display_width:, environment:, type: :computer_use_preview, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(display_height:, display_width:, environment:, type: :computer_use_preview) + # A tool that controls a virtual computer. Learn more about the + # [computer tool](https://platform.openai.com/docs/guides/tools-computer-use). + # + # @param display_height [Float] + # @param display_width [Float] + # @param environment [Symbol, OpenAI::Models::Responses::ComputerTool::Environment] + # @param type [Symbol, :computer_use_preview] # The type of computer environment to control. # @@ -52,11 +48,8 @@ module Environment UBUNTU = :ubuntu BROWSER = :browser - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 68cbd033..9814987c 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -28,20 +28,16 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] # attr_writer :type - # @!parse - # # A message input to the model with a role indicating instruction following - # # hierarchy. Instructions given with the `developer` or `system` role take - # # precedence over instructions given with the `user` role. Messages with the - # # `assistant` role are presumed to have been generated by the model in previous - # # interactions. 
- # # - # # @param content [String, Array] - # # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] - # # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] - # # - # def initialize(content:, role:, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. Messages with the + # `assistant` role are presumed to have been generated by the model in previous + # interactions. + # + # @param content [String, Array] + # @param role [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] + # @param type [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] # Text, image, or audio input to the model, used to generate a response. Can also # contain previous assistant responses. @@ -57,9 +53,8 @@ module Content # types. variant -> { OpenAI::Models::Responses::ResponseInputMessageContentList } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] end # The role of the message input. One of `user`, `assistant`, `system`, or @@ -74,11 +69,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of the message input. Always `message`. @@ -89,11 +81,8 @@ module Type MESSAGE = :message - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 8fd1295e..8065a25a 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -47,20 +47,16 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions] # attr_writer :ranking_options - # @!parse - # # A tool that searches for relevant content from uploaded files. Learn more about - # # the - # # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). - # # - # # @param vector_store_ids [Array] - # # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # # @param max_num_results [Integer] - # # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] - # # @param type [Symbol, :file_search] - # # - # def initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) + # A tool that searches for relevant content from uploaded files. Learn more about + # the + # [file search tool](https://platform.openai.com/docs/guides/tools-file-search). + # + # @param vector_store_ids [Array] + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + # @param max_num_results [Integer] + # @param ranking_options [OpenAI::Models::Responses::FileSearchTool::RankingOptions] + # @param type [Symbol, :file_search] # A filter to apply based on file attributes. 
# @@ -74,9 +70,8 @@ module Filters # Combine multiple filters using `and` or `or`. variant -> { OpenAI::Models::CompoundFilter } - # @!parse - # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end # @see OpenAI::Models::Responses::FileSearchTool#ranking_options @@ -103,15 +98,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :score_threshold - # @!parse - # # Ranking options for search. - # # - # # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] - # # @param score_threshold [Float] - # # - # def initialize(ranker: nil, score_threshold: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranker: nil, score_threshold: nil) + # Ranking options for search. + # + # @param ranker [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] + # @param score_threshold [Float] # The ranker to use for the file search. # @@ -122,11 +113,8 @@ module Ranker AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/function_tool.rb b/lib/openai/models/responses/function_tool.rb index 8b2433dc..d0cf09ab 100644 --- a/lib/openai/models/responses/function_tool.rb +++ b/lib/openai/models/responses/function_tool.rb @@ -35,20 +35,16 @@ class FunctionTool < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :description, String, nil?: true - # @!parse - # # Defines a function in your own code the model can choose to call. Learn more - # # about - # # [function calling](https://platform.openai.com/docs/guides/function-calling). - # # - # # @param name [String] - # # @param parameters [Hash{Symbol=>Object}] - # # @param strict [Boolean] - # # @param description [String, nil] - # # @param type [Symbol, :function] - # # - # def initialize(name:, parameters:, strict:, description: nil, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, parameters:, strict:, description: nil, type: :function) + # Defines a function in your own code the model can choose to call. Learn more + # about + # [function calling](https://platform.openai.com/docs/guides/function-calling). 
+ # + # @param name [String] + # @param parameters [Hash{Symbol=>Object}] + # @param strict [Boolean] + # @param description [String, nil] + # @param type [Symbol, :function] end end end diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index 7a4d136a..eabfd424 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -65,17 +65,13 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param include [Array] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param include [Array] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # The order to return the input items in. Default is `asc`. # @@ -87,11 +83,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 0ed05b08..48793f7b 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -241,61 +241,30 @@ class Response < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param id [String] - # # @param created_at [Float] - # # @param error [OpenAI::Models::Responses::ResponseError, nil] - # # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] - # # @param instructions [String, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - # # @param output [Array] - # # @param parallel_tool_calls [Boolean] - # # @param temperature [Float, nil] - # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param max_output_tokens [Integer, nil] - # # @param previous_response_id [String, nil] - # # @param reasoning [OpenAI::Models::Reasoning, nil] - # # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] - # # @param text [OpenAI::Models::Responses::ResponseTextConfig] - # # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] - # # @param usage [OpenAI::Models::Responses::ResponseUsage] - # # @param user [String] - # # @param object [Symbol, :response] - # # - # def initialize( - # id:, - # created_at:, - # error:, - # incomplete_details:, - # instructions:, - # 
metadata:, - # model:, - # output:, - # parallel_tool_calls:, - # temperature:, - # tool_choice:, - # tools:, - # top_p:, - # max_output_tokens: nil, - # previous_response_id: nil, - # reasoning: nil, - # service_tier: nil, - # status: nil, - # text: nil, - # truncation: nil, - # usage: nil, - # user: nil, - # object: :response, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) + # @param id [String] + # @param created_at [Float] + # @param error [OpenAI::Models::Responses::ResponseError, nil] + # @param incomplete_details [OpenAI::Models::Responses::Response::IncompleteDetails, nil] + # @param instructions [String, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] + # @param output [Array] + # @param parallel_tool_calls [Boolean] + # @param temperature [Float, nil] + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + # @param tools [Array] + # @param top_p [Float, nil] + # @param max_output_tokens [Integer, nil] + # @param previous_response_id [String, nil] + # @param reasoning [OpenAI::Models::Reasoning, nil] + # @param service_tier [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] + # @param status [Symbol, OpenAI::Models::Responses::ResponseStatus] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] + # @param truncation [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] + # @param usage [OpenAI::Models::Responses::ResponseUsage] + # @param user [String] + # @param object [Symbol, :response] # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel @@ -309,14 +278,10 @@ class IncompleteDetails < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] # attr_writer :reason - # @!parse - # # Details about why the response is incomplete. - # # - # # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] - # # - # def initialize(reason: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reason: nil) + # Details about why the response is incomplete. + # + # @param reason [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] # The reason why the response is incomplete. # @@ -327,11 +292,8 @@ module Reason MAX_OUTPUT_TOKENS = :max_output_tokens CONTENT_FILTER = :content_filter - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -360,9 +322,8 @@ module ToolChoice # Use this option to force the model to call a specific function. 
variant -> { OpenAI::Models::Responses::ToolChoiceFunction } - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -391,11 +352,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The truncation strategy to use for the model response. @@ -413,11 +371,8 @@ module Truncation AUTO = :auto DISABLED = :disabled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_audio_delta_event.rb b/lib/openai/models/responses/response_audio_delta_event.rb index 72fd5781..45ede8ad 100644 --- a/lib/openai/models/responses/response_audio_delta_event.rb +++ b/lib/openai/models/responses/response_audio_delta_event.rb @@ -16,15 +16,11 @@ class ResponseAudioDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.delta"] required :type, const: :"response.audio.delta" - # @!parse - # # Emitted when there is a partial audio response. - # # - # # @param delta [String] - # # @param type [Symbol, :"response.audio.delta"] - # # - # def initialize(delta:, type: :"response.audio.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, type: :"response.audio.delta") + # Emitted when there is a partial audio response. + # + # @param delta [String] + # @param type [Symbol, :"response.audio.delta"] end end end diff --git a/lib/openai/models/responses/response_audio_done_event.rb b/lib/openai/models/responses/response_audio_done_event.rb index 6dd92e68..359f11b3 100644 --- a/lib/openai/models/responses/response_audio_done_event.rb +++ b/lib/openai/models/responses/response_audio_done_event.rb @@ -10,14 +10,10 @@ class ResponseAudioDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.done"] required :type, const: :"response.audio.done" - # @!parse - # # Emitted when the audio response is complete. - # # - # # @param type [Symbol, :"response.audio.done"] - # # - # def initialize(type: :"response.audio.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :"response.audio.done") + # Emitted when the audio response is complete. + # + # @param type [Symbol, :"response.audio.done"] end end end diff --git a/lib/openai/models/responses/response_audio_transcript_delta_event.rb b/lib/openai/models/responses/response_audio_transcript_delta_event.rb index 062c5a76..48f70a9d 100644 --- a/lib/openai/models/responses/response_audio_transcript_delta_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_delta_event.rb @@ -16,15 +16,11 @@ class ResponseAudioTranscriptDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.transcript.delta"] required :type, const: :"response.audio.transcript.delta" - # @!parse - # # Emitted when there is a partial transcript of audio. 
- # # - # # @param delta [String] - # # @param type [Symbol, :"response.audio.transcript.delta"] - # # - # def initialize(delta:, type: :"response.audio.transcript.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, type: :"response.audio.transcript.delta") + # Emitted when there is a partial transcript of audio. + # + # @param delta [String] + # @param type [Symbol, :"response.audio.transcript.delta"] end end end diff --git a/lib/openai/models/responses/response_audio_transcript_done_event.rb b/lib/openai/models/responses/response_audio_transcript_done_event.rb index 98399c6e..5abf997b 100644 --- a/lib/openai/models/responses/response_audio_transcript_done_event.rb +++ b/lib/openai/models/responses/response_audio_transcript_done_event.rb @@ -10,14 +10,10 @@ class ResponseAudioTranscriptDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.audio.transcript.done"] required :type, const: :"response.audio.transcript.done" - # @!parse - # # Emitted when the full audio transcript is completed. - # # - # # @param type [Symbol, :"response.audio.transcript.done"] - # # - # def initialize(type: :"response.audio.transcript.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :"response.audio.transcript.done") + # Emitted when the full audio transcript is completed. + # + # @param type [Symbol, :"response.audio.transcript.done"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb index 2fa390fe..934cb5f9 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_delta_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallCodeDeltaEvent < OpenAI::Internal::Type::BaseMo # @return [Symbol, :"response.code_interpreter_call.code.delta"] required :type, const: :"response.code_interpreter_call.code.delta" - # @!parse - # # Emitted when a partial code snippet is added by the code interpreter. - # # - # # @param delta [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.code.delta"] - # # - # def initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, output_index:, type: :"response.code_interpreter_call.code.delta") + # Emitted when a partial code snippet is added by the code interpreter. + # + # @param delta [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.code.delta"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb index caa291b9..8b10fde8 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_code_done_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallCodeDoneEvent < OpenAI::Internal::Type::BaseMod # @return [Symbol, :"response.code_interpreter_call.code.done"] required :type, const: :"response.code_interpreter_call.code.done" - # @!parse - # # Emitted when code snippet output is finalized by the code interpreter. 
- # # - # # @param code [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.code.done"] - # # - # def initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, output_index:, type: :"response.code_interpreter_call.code.done") + # Emitted when code snippet output is finalized by the code interpreter. + # + # @param code [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.code.done"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb index 0e13a5e9..c0e507cd 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_completed_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallCompletedEvent < OpenAI::Internal::Type::BaseMo # @return [Symbol, :"response.code_interpreter_call.completed"] required :type, const: :"response.code_interpreter_call.completed" - # @!parse - # # Emitted when the code interpreter call is completed. - # # - # # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.completed"] - # # - # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.completed") + # Emitted when the code interpreter call is completed. + # + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.completed"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb index 66aa4fce..3277ceef 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_in_progress_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallInProgressEvent < OpenAI::Internal::Type::BaseM # @return [Symbol, :"response.code_interpreter_call.in_progress"] required :type, const: :"response.code_interpreter_call.in_progress" - # @!parse - # # Emitted when a code interpreter call is in progress. - # # - # # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.in_progress"] - # # - # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.in_progress") + # Emitted when a code interpreter call is in progress. 
+ # + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.in_progress"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb index 7a87f2c2..3a361629 100644 --- a/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb +++ b/lib/openai/models/responses/response_code_interpreter_call_interpreting_event.rb @@ -22,16 +22,12 @@ class ResponseCodeInterpreterCallInterpretingEvent < OpenAI::Internal::Type::Bas # @return [Symbol, :"response.code_interpreter_call.interpreting"] required :type, const: :"response.code_interpreter_call.interpreting" - # @!parse - # # Emitted when the code interpreter is actively interpreting the code snippet. - # # - # # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.code_interpreter_call.interpreting"] - # # - # def initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code_interpreter_call:, output_index:, type: :"response.code_interpreter_call.interpreting") + # Emitted when the code interpreter is actively interpreting the code snippet. + # + # @param code_interpreter_call [OpenAI::Models::Responses::ResponseCodeInterpreterToolCall] + # @param output_index [Integer] + # @param type [Symbol, :"response.code_interpreter_call.interpreting"] end end end diff --git a/lib/openai/models/responses/response_code_interpreter_tool_call.rb b/lib/openai/models/responses/response_code_interpreter_tool_call.rb index 9d7260d9..cbab454f 100644 --- a/lib/openai/models/responses/response_code_interpreter_tool_call.rb +++ b/lib/openai/models/responses/response_code_interpreter_tool_call.rb @@ -35,18 +35,14 @@ class ResponseCodeInterpreterToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter_call] required :type, const: :code_interpreter_call - # @!parse - # # A tool call to run code. - # # - # # @param id [String] - # # @param code [String] - # # @param results [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] - # # @param type [Symbol, :code_interpreter_call] - # # - # def initialize(id:, code:, results:, status:, type: :code_interpreter_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, results:, status:, type: :code_interpreter_call) + # A tool call to run code. + # + # @param id [String] + # @param code [String] + # @param results [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Status] + # @param type [Symbol, :code_interpreter_call] # The output of a code interpreter tool call that is text. module Result @@ -73,15 +69,11 @@ class Logs < OpenAI::Internal::Type::BaseModel # @return [Symbol, :logs] required :type, const: :logs - # @!parse - # # The output of a code interpreter tool call that is text. 
- # # - # # @param logs [String] - # # @param type [Symbol, :logs] - # # - # def initialize(logs:, type: :logs, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(logs:, type: :logs) + # The output of a code interpreter tool call that is text. + # + # @param logs [String] + # @param type [Symbol, :logs] end class Files < OpenAI::Internal::Type::BaseModel @@ -97,15 +89,11 @@ class Files < OpenAI::Internal::Type::BaseModel # @return [Symbol, :files] required :type, const: :files - # @!parse - # # The output of a code interpreter tool call that is a file. - # # - # # @param files [Array] - # # @param type [Symbol, :files] - # # - # def initialize(files:, type: :files, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(files:, type: :files) + # The output of a code interpreter tool call that is a file. + # + # @param files [Array] + # @param type [Symbol, :files] class File < OpenAI::Internal::Type::BaseModel # @!attribute file_id @@ -120,19 +108,14 @@ class File < OpenAI::Internal::Type::BaseModel # @return [String] required :mime_type, String - # @!parse - # # @param file_id [String] - # # @param mime_type [String] - # # - # def initialize(file_id:, mime_type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, mime_type:) + # @param file_id [String] + # @param mime_type [String] end end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Logs, OpenAI::Models::Responses::ResponseCodeInterpreterToolCall::Result::Files)] end # The status of the code interpreter tool call. @@ -145,11 +128,8 @@ module Status INTERPRETING = :interpreting COMPLETED = :completed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_completed_event.rb b/lib/openai/models/responses/response_completed_event.rb index 883a7de9..509663df 100644 --- a/lib/openai/models/responses/response_completed_event.rb +++ b/lib/openai/models/responses/response_completed_event.rb @@ -16,15 +16,11 @@ class ResponseCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.completed"] required :type, const: :"response.completed" - # @!parse - # # Emitted when the model response is complete. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.completed"] - # # - # def initialize(response:, type: :"response.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.completed") + # Emitted when the model response is complete. 
+ # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.completed"] end end end diff --git a/lib/openai/models/responses/response_computer_tool_call.rb b/lib/openai/models/responses/response_computer_tool_call.rb index c8d6e68a..b84db1e5 100644 --- a/lib/openai/models/responses/response_computer_tool_call.rb +++ b/lib/openai/models/responses/response_computer_tool_call.rb @@ -42,21 +42,17 @@ class ResponseComputerToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] required :type, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCall::Type } - # @!parse - # # A tool call to a computer use tool. See the - # # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) - # # for more information. - # # - # # @param id [String] - # # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] - # # @param call_id [String] - # # @param pending_safety_checks [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] - # # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] - # # - # def initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, action:, call_id:, pending_safety_checks:, status:, type:) + # A tool call to a computer use tool. See the + # [computer use guide](https://platform.openai.com/docs/guides/tools-computer-use) + # for more information. + # + # @param id [String] + # @param action [OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait] + # @param call_id [String] + # @param pending_safety_checks [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Status] + # @param type [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Type] # A click action. # @@ -120,17 +116,13 @@ class Click < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A click action. 
- # # - # # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :click] - # # - # def initialize(button:, x:, y_:, type: :click, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(button:, x:, y_:, type: :click) + # A click action. + # + # @param button [Symbol, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click::Button] + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :click] # Indicates which mouse button was pressed during the click. One of `left`, # `right`, `wheel`, `back`, or `forward`. @@ -145,11 +137,8 @@ module Button BACK = :back FORWARD = :forward - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -173,16 +162,12 @@ class DoubleClick < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A double click action. - # # - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :double_click] - # # - # def initialize(x:, y_:, type: :double_click, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(x:, y_:, type: :double_click) + # A double click action. + # + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :double_click] end class Drag < OpenAI::Internal::Type::BaseModel @@ -208,15 +193,11 @@ class Drag < OpenAI::Internal::Type::BaseModel # @return [Symbol, :drag] required :type, const: :drag - # @!parse - # # A drag action. - # # - # # @param path [Array] - # # @param type [Symbol, :drag] - # # - # def initialize(path:, type: :drag, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(path:, type: :drag) + # A drag action. + # + # @param path [Array] + # @param type [Symbol, :drag] class Path < OpenAI::Internal::Type::BaseModel # @!attribute x @@ -231,15 +212,11 @@ class Path < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A series of x/y coordinate pairs in the drag path. - # # - # # @param x [Integer] - # # @param y_ [Integer] - # # - # def initialize(x:, y_:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(x:, y_:) + # A series of x/y coordinate pairs in the drag path. + # + # @param x [Integer] + # @param y_ [Integer] end end @@ -258,15 +235,11 @@ class Keypress < OpenAI::Internal::Type::BaseModel # @return [Symbol, :keypress] required :type, const: :keypress - # @!parse - # # A collection of keypresses the model would like to perform. - # # - # # @param keys [Array] - # # @param type [Symbol, :keypress] - # # - # def initialize(keys:, type: :keypress, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(keys:, type: :keypress) + # A collection of keypresses the model would like to perform. + # + # @param keys [Array] + # @param type [Symbol, :keypress] end class Move < OpenAI::Internal::Type::BaseModel @@ -289,16 +262,12 @@ class Move < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A mouse move action. 
- # # - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :move] - # # - # def initialize(x:, y_:, type: :move, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(x:, y_:, type: :move) + # A mouse move action. + # + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :move] end class Screenshot < OpenAI::Internal::Type::BaseModel @@ -309,14 +278,10 @@ class Screenshot < OpenAI::Internal::Type::BaseModel # @return [Symbol, :screenshot] required :type, const: :screenshot - # @!parse - # # A screenshot action. - # # - # # @param type [Symbol, :screenshot] - # # - # def initialize(type: :screenshot, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :screenshot) + # A screenshot action. + # + # @param type [Symbol, :screenshot] end class Scroll < OpenAI::Internal::Type::BaseModel @@ -351,18 +316,14 @@ class Scroll < OpenAI::Internal::Type::BaseModel # @return [Integer] required :y_, Integer, api_name: :y - # @!parse - # # A scroll action. - # # - # # @param scroll_x [Integer] - # # @param scroll_y [Integer] - # # @param x [Integer] - # # @param y_ [Integer] - # # @param type [Symbol, :scroll] - # # - # def initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(scroll_x:, scroll_y:, x:, y_:, type: :scroll) + # A scroll action. + # + # @param scroll_x [Integer] + # @param scroll_y [Integer] + # @param x [Integer] + # @param y_ [Integer] + # @param type [Symbol, :scroll] end class Type < OpenAI::Internal::Type::BaseModel @@ -379,15 +340,11 @@ class Type < OpenAI::Internal::Type::BaseModel # @return [Symbol, :type] required :type, const: :type - # @!parse - # # An action to type in text. - # # - # # @param text [String] - # # @param type [Symbol, :type] - # # - # def initialize(text:, type: :type, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :type) + # An action to type in text. + # + # @param text [String] + # @param type [Symbol, :type] end class Wait < OpenAI::Internal::Type::BaseModel @@ -398,19 +355,14 @@ class Wait < OpenAI::Internal::Type::BaseModel # @return [Symbol, :wait] required :type, const: :wait - # @!parse - # # A wait action. - # # - # # @param type [Symbol, :wait] - # # - # def initialize(type: :wait, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type: :wait) + # A wait action. 
+ # + # @param type [Symbol, :wait] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseComputerToolCall::Action::Click, OpenAI::Models::Responses::ResponseComputerToolCall::Action::DoubleClick, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Drag, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Keypress, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Move, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Screenshot, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Scroll, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Type, OpenAI::Models::Responses::ResponseComputerToolCall::Action::Wait)] end class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel @@ -432,16 +384,12 @@ class PendingSafetyCheck < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # A pending safety check for the computer call. - # # - # # @param id [String] - # # @param code [String] - # # @param message [String] - # # - # def initialize(id:, code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, message:) + # A pending safety check for the computer call. + # + # @param id [String] + # @param code [String] + # @param message [String] end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -455,11 +403,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of the computer call. Always `computer_call`. @@ -470,11 +415,8 @@ module Type COMPUTER_CALL = :computer_call - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 2cc50149..2a10286a 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -51,17 +51,13 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] # attr_writer :status - # @!parse - # # @param id [String] - # # @param call_id [String] - # # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - # # @param acknowledged_safety_checks [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] - # # @param type [Symbol, :computer_call_output] - # # - # def initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) + # @param id [String] + # @param call_id [String] + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] + # @param acknowledged_safety_checks [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] + # @param type [Symbol, :computer_call_output] class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -82,16 +78,12 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # A pending safety check for the computer call. - # # - # # @param id [String] - # # @param code [String] - # # @param message [String] - # # - # def initialize(id:, code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, message:) + # A pending safety check for the computer call. + # + # @param id [String] + # @param code [String] + # @param message [String] end # The status of the message input. One of `in_progress`, `completed`, or @@ -105,11 +97,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index 9cfc543a..e65f4fc5 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -31,16 +31,12 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # # @return [String] # attr_writer :image_url - # @!parse - # # A computer screenshot image used with the computer use tool. 
- # # - # # @param file_id [String] - # # @param image_url [String] - # # @param type [Symbol, :computer_screenshot] - # # - # def initialize(file_id: nil, image_url: nil, type: :computer_screenshot, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) + # A computer screenshot image used with the computer use tool. + # + # @param file_id [String] + # @param image_url [String] + # @param type [Symbol, :computer_screenshot] end end end diff --git a/lib/openai/models/responses/response_content.rb b/lib/openai/models/responses/response_content.rb index 3c306ddb..ed9ff454 100644 --- a/lib/openai/models/responses/response_content.rb +++ b/lib/openai/models/responses/response_content.rb @@ -22,9 +22,8 @@ module ResponseContent # A refusal from the model. variant -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile, OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_added_event.rb b/lib/openai/models/responses/response_content_part_added_event.rb index 2d78d1ff..8399e487 100644 --- a/lib/openai/models/responses/response_content_part_added_event.rb +++ b/lib/openai/models/responses/response_content_part_added_event.rb @@ -34,18 +34,14 @@ class ResponseContentPartAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.content_part.added"] required :type, const: :"response.content_part.added" - # @!parse - # # Emitted when a new content part is added. - # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - # # @param type [Symbol, :"response.content_part.added"] - # # - # def initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.added") + # Emitted when a new content part is added. + # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + # @param type [Symbol, :"response.content_part.added"] # The content part that was added. # @@ -61,9 +57,8 @@ module Part # A refusal from the model. 
variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_content_part_done_event.rb b/lib/openai/models/responses/response_content_part_done_event.rb index d28334f9..945ef949 100644 --- a/lib/openai/models/responses/response_content_part_done_event.rb +++ b/lib/openai/models/responses/response_content_part_done_event.rb @@ -34,18 +34,14 @@ class ResponseContentPartDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.content_part.done"] required :type, const: :"response.content_part.done" - # @!parse - # # Emitted when a content part is done. - # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] - # # @param type [Symbol, :"response.content_part.done"] - # # - # def initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, part:, type: :"response.content_part.done") + # Emitted when a content part is done. + # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param part [OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal] + # @param type [Symbol, :"response.content_part.done"] # The content part that is done. # @@ -61,9 +57,8 @@ module Part # A refusal from the model. 
variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end end end diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 1969ec3a..3eeb57fc 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -225,53 +225,26 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :user - # @!parse - # # @param input [String, Array] - # # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] - # # @param include [Array, nil] - # # @param instructions [String, nil] - # # @param max_output_tokens [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param parallel_tool_calls [Boolean, nil] - # # @param previous_response_id [String, nil] - # # @param reasoning [OpenAI::Models::Reasoning, nil] - # # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] - # # @param store [Boolean, nil] - # # @param temperature [Float, nil] - # # @param text [OpenAI::Models::Responses::ResponseTextConfig] - # # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # # @param tools [Array] - # # @param top_p [Float, nil] - # # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] - # # @param user [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # input:, - # model:, - # include: nil, - # instructions: nil, - # max_output_tokens: nil, - # metadata: nil, - # parallel_tool_calls: nil, - # previous_response_id: nil, - # reasoning: nil, - # service_tier: nil, - # store: nil, - # temperature: nil, - # text: nil, - # tool_choice: nil, - # tools: nil, - # top_p: nil, - # truncation: nil, - # user: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) + # @param input [String, Array] + # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] + # @param include [Array, nil] + # @param instructions [String, nil] + # @param max_output_tokens [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param parallel_tool_calls [Boolean, nil] + # @param previous_response_id [String, nil] + # @param reasoning [OpenAI::Models::Reasoning, nil] + # @param service_tier [Symbol, OpenAI::Models::Responses::ResponseCreateParams::ServiceTier, nil] + # @param store [Boolean, nil] + # @param temperature [Float, nil] + # @param text [OpenAI::Models::Responses::ResponseTextConfig] + # @param tool_choice [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, 
OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] + # @param tools [Array] + # @param top_p [Float, nil] + # @param truncation [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] + # @param user [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Text, image, or file inputs to the model, used to generate a response. # @@ -293,9 +266,8 @@ module Input # different content types. variant -> { OpenAI::Models::Responses::ResponseInput } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] end # Specifies the latency tier to use for processing the request. This parameter is @@ -322,11 +294,8 @@ module ServiceTier DEFAULT = :default FLEX = :flex - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # How the model should select which tool (or tools) to use when generating a @@ -352,9 +321,8 @@ module ToolChoice # Use this option to force the model to call a specific function. variant -> { OpenAI::Models::Responses::ToolChoiceFunction } - # @!parse - # # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] - # def self.variants; end + # @!method self.variants + # @return [Array(Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction)] end # The truncation strategy to use for the model response. @@ -370,11 +338,8 @@ module Truncation AUTO = :auto DISABLED = :disabled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_created_event.rb b/lib/openai/models/responses/response_created_event.rb index 2eb916c1..ad5cf6d0 100644 --- a/lib/openai/models/responses/response_created_event.rb +++ b/lib/openai/models/responses/response_created_event.rb @@ -16,15 +16,11 @@ class ResponseCreatedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.created"] required :type, const: :"response.created" - # @!parse - # # An event that is emitted when a response is created. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.created"] - # # - # def initialize(response:, type: :"response.created", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.created") + # An event that is emitted when a response is created. 
+ # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.created"] end end end diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index 516661d0..e3d6735f 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb @@ -9,12 +9,8 @@ class ResponseDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/responses/response_error.rb b/lib/openai/models/responses/response_error.rb index a42f876f..3727a834 100644 --- a/lib/openai/models/responses/response_error.rb +++ b/lib/openai/models/responses/response_error.rb @@ -16,15 +16,11 @@ class ResponseError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # An error object returned when the model fails to generate a Response. - # # - # # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # An error object returned when the model fails to generate a Response. + # + # @param code [Symbol, OpenAI::Models::Responses::ResponseError::Code] + # @param message [String] # The error code for the response. # @@ -51,11 +47,8 @@ module Code FAILED_TO_DOWNLOAD_IMAGE = :failed_to_download_image IMAGE_FILE_NOT_FOUND = :image_file_not_found - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_error_event.rb b/lib/openai/models/responses/response_error_event.rb index 5558c8c0..d3bfa55e 100644 --- a/lib/openai/models/responses/response_error_event.rb +++ b/lib/openai/models/responses/response_error_event.rb @@ -28,17 +28,13 @@ class ResponseErrorEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :error] required :type, const: :error - # @!parse - # # Emitted when an error occurs. - # # - # # @param code [String, nil] - # # @param message [String] - # # @param param [String, nil] - # # @param type [Symbol, :error] - # # - # def initialize(code:, message:, param:, type: :error, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:, param:, type: :error) + # Emitted when an error occurs. 
+ # + # @param code [String, nil] + # @param message [String] + # @param param [String, nil] + # @param type [Symbol, :error] end end end diff --git a/lib/openai/models/responses/response_failed_event.rb b/lib/openai/models/responses/response_failed_event.rb index aa99a457..cbed3cb7 100644 --- a/lib/openai/models/responses/response_failed_event.rb +++ b/lib/openai/models/responses/response_failed_event.rb @@ -16,15 +16,11 @@ class ResponseFailedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.failed"] required :type, const: :"response.failed" - # @!parse - # # An event that is emitted when a response fails. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.failed"] - # # - # def initialize(response:, type: :"response.failed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.failed") + # An event that is emitted when a response fails. + # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.failed"] end end end diff --git a/lib/openai/models/responses/response_file_search_call_completed_event.rb b/lib/openai/models/responses/response_file_search_call_completed_event.rb index 458771a8..ffb7c68d 100644 --- a/lib/openai/models/responses/response_file_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_file_search_call_completed_event.rb @@ -22,16 +22,12 @@ class ResponseFileSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.file_search_call.completed"] required :type, const: :"response.file_search_call.completed" - # @!parse - # # Emitted when a file search call is completed (results found). - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.file_search_call.completed"] - # # - # def initialize(item_id:, output_index:, type: :"response.file_search_call.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.completed") + # Emitted when a file search call is completed (results found). + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.file_search_call.completed"] end end end diff --git a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb index 4671a7e6..72fafbc7 100644 --- a/lib/openai/models/responses/response_file_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_file_search_call_in_progress_event.rb @@ -22,16 +22,12 @@ class ResponseFileSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.file_search_call.in_progress"] required :type, const: :"response.file_search_call.in_progress" - # @!parse - # # Emitted when a file search call is initiated. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.file_search_call.in_progress"] - # # - # def initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.in_progress") + # Emitted when a file search call is initiated. 
+ # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.file_search_call.in_progress"] end end end diff --git a/lib/openai/models/responses/response_file_search_call_searching_event.rb b/lib/openai/models/responses/response_file_search_call_searching_event.rb index 880e33e1..07c1186a 100644 --- a/lib/openai/models/responses/response_file_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_file_search_call_searching_event.rb @@ -22,16 +22,12 @@ class ResponseFileSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.file_search_call.searching"] required :type, const: :"response.file_search_call.searching" - # @!parse - # # Emitted when a file search is currently searching. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.file_search_call.searching"] - # # - # def initialize(item_id:, output_index:, type: :"response.file_search_call.searching", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.file_search_call.searching") + # Emitted when a file search is currently searching. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.file_search_call.searching"] end end end diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 7048c7a4..43a865fd 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -37,20 +37,16 @@ class ResponseFileSearchToolCall < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseFileSearchToolCall::Result] }, nil?: true - # @!parse - # # The results of a file search tool call. See the - # # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) - # # for more information. - # # - # # @param id [String] - # # @param queries [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] - # # @param results [Array, nil] - # # @param type [Symbol, :file_search_call] - # # - # def initialize(id:, queries:, status:, results: nil, type: :file_search_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, queries:, status:, results: nil, type: :file_search_call) + # The results of a file search tool call. See the + # [file search guide](https://platform.openai.com/docs/guides/tools-file-search) + # for more information. + # + # @param id [String] + # @param queries [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFileSearchToolCall::Status] + # @param results [Array, nil] + # @param type [Symbol, :file_search_call] # The status of the file search tool call. One of `in_progress`, `searching`, # `incomplete` or `failed`, @@ -65,11 +61,8 @@ module Status INCOMPLETE = :incomplete FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end class Result < OpenAI::Internal::Type::BaseModel @@ -125,16 +118,12 @@ class Result < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :text - # @!parse - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param file_id [String] - # # @param filename [String] - # # @param score [Float] - # # @param text [String] - # # - # def initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param file_id [String] + # @param filename [String] + # @param score [Float] + # @param text [String] module Attribute extend OpenAI::Internal::Type::Union @@ -145,9 +134,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/responses/response_format_text_config.rb b/lib/openai/models/responses/response_format_text_config.rb index bdb5e64f..6b73450e 100644 --- a/lib/openai/models/responses/response_format_text_config.rb +++ b/lib/openai/models/responses/response_format_text_config.rb @@ -34,9 +34,8 @@ module ResponseFormatTextConfig # to do so. variant :json_object, -> { OpenAI::Models::ResponseFormatJSONObject } - # @!parse - # # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject)] end end end diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index 3cbc68bd..c6271a6a 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -45,20 +45,16 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :strict, OpenAI::Internal::Type::Boolean, nil?: true - # @!parse - # # JSON Schema response format. Used to generate structured JSON responses. Learn - # # more about - # # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). - # # - # # @param name [String] - # # @param schema [Hash{Symbol=>Object}] - # # @param description [String] - # # @param strict [Boolean, nil] - # # @param type [Symbol, :json_schema] - # # - # def initialize(name:, schema:, description: nil, strict: nil, type: :json_schema, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, schema:, description: nil, strict: nil, type: :json_schema) + # JSON Schema response format. Used to generate structured JSON responses. Learn + # more about + # [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
+ # + # @param name [String] + # @param schema [Hash{Symbol=>Object}] + # @param description [String] + # @param strict [Boolean, nil] + # @param type [Symbol, :json_schema] end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb index 6005ba84..abd84e9c 100644 --- a/lib/openai/models/responses/response_function_call_arguments_delta_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_delta_event.rb @@ -28,17 +28,13 @@ class ResponseFunctionCallArgumentsDeltaEvent < OpenAI::Internal::Type::BaseMode # @return [Symbol, :"response.function_call_arguments.delta"] required :type, const: :"response.function_call_arguments.delta" - # @!parse - # # Emitted when there is a partial function-call arguments delta. - # # - # # @param delta [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.function_call_arguments.delta"] - # # - # def initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(delta:, item_id:, output_index:, type: :"response.function_call_arguments.delta") + # Emitted when there is a partial function-call arguments delta. + # + # @param delta [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.function_call_arguments.delta"] end end end diff --git a/lib/openai/models/responses/response_function_call_arguments_done_event.rb b/lib/openai/models/responses/response_function_call_arguments_done_event.rb index a9280f46..ba402026 100644 --- a/lib/openai/models/responses/response_function_call_arguments_done_event.rb +++ b/lib/openai/models/responses/response_function_call_arguments_done_event.rb @@ -27,17 +27,13 @@ class ResponseFunctionCallArgumentsDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.function_call_arguments.done"] required :type, const: :"response.function_call_arguments.done" - # @!parse - # # Emitted when function-call arguments are finalized. - # # - # # @param arguments [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.function_call_arguments.done"] - # # - # def initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, item_id:, output_index:, type: :"response.function_call_arguments.done") + # Emitted when function-call arguments are finalized. + # + # @param arguments [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.function_call_arguments.done"] end end end diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 5bf53133..25210a43 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -49,21 +49,17 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] # attr_writer :status - # @!parse - # # A tool call to run a function. 
See the - # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # # for more information. - # # - # # @param arguments [String] - # # @param call_id [String] - # # @param name [String] - # # @param id [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] - # # @param type [Symbol, :function_call] - # # - # def initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. + # + # @param arguments [String] + # @param call_id [String] + # @param name [String] + # @param id [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] + # @param type [Symbol, :function_call] # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -76,11 +72,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_function_tool_call_item.rb b/lib/openai/models/responses/response_function_tool_call_item.rb index 17e2ceff..2244965f 100644 --- a/lib/openai/models/responses/response_function_tool_call_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_item.rb @@ -10,16 +10,12 @@ class ResponseFunctionToolCallItem < OpenAI::Models::Responses::ResponseFunction # @return [String] required :id, String - # @!parse - # # A tool call to run a function. See the - # # [function calling guide](https://platform.openai.com/docs/guides/function-calling) - # # for more information. - # # - # # @param id [String] - # # - # def initialize(id:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:) + # A tool call to run a function. See the + # [function calling guide](https://platform.openai.com/docs/guides/function-calling) + # for more information. 
+ # + # @param id [String] end end end diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 2123bd97..6eb9b8a9 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -39,16 +39,12 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] # attr_writer :status - # @!parse - # # @param id [String] - # # @param call_id [String] - # # @param output [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] - # # @param type [Symbol, :function_call_output] - # # - # def initialize(id:, call_id:, output:, status: nil, type: :function_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) + # @param id [String] + # @param call_id [String] + # @param output [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] + # @param type [Symbol, :function_call_output] # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. @@ -61,11 +57,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_function_web_search.rb b/lib/openai/models/responses/response_function_web_search.rb index e6d9a2e8..3529a019 100644 --- a/lib/openai/models/responses/response_function_web_search.rb +++ b/lib/openai/models/responses/response_function_web_search.rb @@ -22,18 +22,14 @@ class ResponseFunctionWebSearch < OpenAI::Internal::Type::BaseModel # @return [Symbol, :web_search_call] required :type, const: :web_search_call - # @!parse - # # The results of a web search tool call. See the - # # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for - # # more information. - # # - # # @param id [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] - # # @param type [Symbol, :web_search_call] - # # - # def initialize(id:, status:, type: :web_search_call, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, status:, type: :web_search_call) + # The results of a web search tool call. See the + # [web search guide](https://platform.openai.com/docs/guides/tools-web-search) for + # more information. + # + # @param id [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseFunctionWebSearch::Status] + # @param type [Symbol, :web_search_call] # The status of the web search tool call. # @@ -46,11 +42,8 @@ module Status COMPLETED = :completed FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_in_progress_event.rb b/lib/openai/models/responses/response_in_progress_event.rb index a26c5659..0259d443 100644 --- a/lib/openai/models/responses/response_in_progress_event.rb +++ b/lib/openai/models/responses/response_in_progress_event.rb @@ -16,15 +16,11 @@ class ResponseInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.in_progress"] required :type, const: :"response.in_progress" - # @!parse - # # Emitted when the response is in progress. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.in_progress"] - # # - # def initialize(response:, type: :"response.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.in_progress") + # Emitted when the response is in progress. + # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.in_progress"] end end end diff --git a/lib/openai/models/responses/response_includable.rb b/lib/openai/models/responses/response_includable.rb index 71a49423..2f90f277 100644 --- a/lib/openai/models/responses/response_includable.rb +++ b/lib/openai/models/responses/response_includable.rb @@ -18,11 +18,8 @@ module ResponseIncludable MESSAGE_INPUT_IMAGE_IMAGE_URL = :"message.input_image.image_url" COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = :"computer_call_output.output.image_url" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_incomplete_event.rb b/lib/openai/models/responses/response_incomplete_event.rb index 08fb757a..5536418d 100644 --- a/lib/openai/models/responses/response_incomplete_event.rb +++ b/lib/openai/models/responses/response_incomplete_event.rb @@ -16,15 +16,11 @@ class ResponseIncompleteEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.incomplete"] required :type, const: :"response.incomplete" - # @!parse - # # An event that is emitted when a response finishes as incomplete. - # # - # # @param response [OpenAI::Models::Responses::Response] - # # @param type [Symbol, :"response.incomplete"] - # # - # def initialize(response:, type: :"response.incomplete", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(response:, type: :"response.incomplete") + # An event that is emitted when a response finishes as incomplete. + # + # @param response [OpenAI::Models::Responses::Response] + # @param type [Symbol, :"response.incomplete"] end end end diff --git a/lib/openai/models/responses/response_input_audio.rb b/lib/openai/models/responses/response_input_audio.rb index 90dcdee6..f03f775a 100644 --- a/lib/openai/models/responses/response_input_audio.rb +++ b/lib/openai/models/responses/response_input_audio.rb @@ -22,16 +22,12 @@ class ResponseInputAudio < OpenAI::Internal::Type::BaseModel # @return [Symbol, :input_audio] required :type, const: :input_audio - # @!parse - # # An audio input to the model. 
- # # - # # @param data [String] - # # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] - # # @param type [Symbol, :input_audio] - # # - # def initialize(data:, format_:, type: :input_audio, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, format_:, type: :input_audio) + # An audio input to the model. + # + # @param data [String] + # @param format_ [Symbol, OpenAI::Models::Responses::ResponseInputAudio::Format] + # @param type [Symbol, :input_audio] # The format of the audio data. Currently supported formats are `mp3` and `wav`. # @@ -42,11 +38,8 @@ module Format MP3 = :mp3 WAV = :wav - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_input_content.rb b/lib/openai/models/responses/response_input_content.rb index 901a5159..806d9c26 100644 --- a/lib/openai/models/responses/response_input_content.rb +++ b/lib/openai/models/responses/response_input_content.rb @@ -18,9 +18,8 @@ module ResponseInputContent # A file input to the model. variant :input_file, -> { OpenAI::Models::Responses::ResponseInputFile } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseInputText, OpenAI::Models::Responses::ResponseInputImage, OpenAI::Models::Responses::ResponseInputFile)] end end end diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 0e2a3c73..4b186be4 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -40,17 +40,13 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :filename - # @!parse - # # A file input to the model. - # # - # # @param file_data [String] - # # @param file_id [String] - # # @param filename [String] - # # @param type [Symbol, :input_file] - # # - # def initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) + # A file input to the model. + # + # @param file_data [String] + # @param file_id [String] + # @param filename [String] + # @param type [Symbol, :input_file] end end end diff --git a/lib/openai/models/responses/response_input_image.rb b/lib/openai/models/responses/response_input_image.rb index c508cf8a..6a07ce69 100644 --- a/lib/openai/models/responses/response_input_image.rb +++ b/lib/openai/models/responses/response_input_image.rb @@ -30,18 +30,14 @@ class ResponseInputImage < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :image_url, String, nil?: true - # @!parse - # # An image input to the model. Learn about - # # [image inputs](https://platform.openai.com/docs/guides/vision). 
- # # - # # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] - # # @param file_id [String, nil] - # # @param image_url [String, nil] - # # @param type [Symbol, :input_image] - # # - # def initialize(detail:, file_id: nil, image_url: nil, type: :input_image, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(detail:, file_id: nil, image_url: nil, type: :input_image) + # An image input to the model. Learn about + # [image inputs](https://platform.openai.com/docs/guides/vision). + # + # @param detail [Symbol, OpenAI::Models::Responses::ResponseInputImage::Detail] + # @param file_id [String, nil] + # @param image_url [String, nil] + # @param type [Symbol, :input_image] # The detail level of the image to be sent to the model. One of `high`, `low`, or # `auto`. Defaults to `auto`. @@ -54,11 +50,8 @@ module Detail LOW = :low AUTO = :auto - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index db0e26d2..d70b58bc 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -93,19 +93,15 @@ class Message < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] # attr_writer :type - # @!parse - # # A message input to the model with a role indicating instruction following - # # hierarchy. Instructions given with the `developer` or `system` role take - # # precedence over instructions given with the `user` role. - # # - # # @param content [Array] - # # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] - # # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] - # # - # def initialize(content:, role:, status: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content:, role:, status: nil, type: nil) + # A message input to the model with a role indicating instruction following + # hierarchy. Instructions given with the `developer` or `system` role take + # precedence over instructions given with the `user` role. + # + # @param content [Array] + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] # The role of the message input. One of `user`, `system`, or `developer`. # @@ -117,11 +113,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -135,11 +128,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of the message input. Always set to `message`. @@ -150,11 +140,8 @@ module Type MESSAGE = :message - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -210,19 +197,15 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] # attr_writer :status - # @!parse - # # The output of a computer tool call. - # # - # # @param call_id [String] - # # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] - # # @param id [String] - # # @param acknowledged_safety_checks [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] - # # @param type [Symbol, :computer_call_output] - # # - # def initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) + # The output of a computer tool call. + # + # @param call_id [String] + # @param output [OpenAI::Models::Responses::ResponseComputerToolCallOutputScreenshot] + # @param id [String] + # @param acknowledged_safety_checks [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] + # @param type [Symbol, :computer_call_output] class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @!attribute id @@ -243,16 +226,12 @@ class AcknowledgedSafetyCheck < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # A pending safety check for the computer call. - # # - # # @param id [String] - # # @param code [String] - # # @param message [String] - # # - # def initialize(id:, code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, code:, message:) + # A pending safety check for the computer call. + # + # @param id [String] + # @param code [String] + # @param message [String] end # The status of the message input. One of `in_progress`, `completed`, or @@ -266,11 +245,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -315,18 +291,14 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] # attr_writer :status - # @!parse - # # The output of a function tool call. - # # - # # @param call_id [String] - # # @param output [String] - # # @param id [String] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] - # # @param type [Symbol, :function_call_output] - # # - # def initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) + # The output of a function tool call. + # + # @param call_id [String] + # @param output [String] + # @param id [String] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] + # @param type [Symbol, :function_call_output] # The status of the item. One of `in_progress`, `completed`, or `incomplete`. 
# Populated when items are returned via API. @@ -339,11 +311,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -360,20 +329,15 @@ class ItemReference < OpenAI::Internal::Type::BaseModel # @return [Symbol, :item_reference] required :type, const: :item_reference - # @!parse - # # An internal identifier for an item to reference. - # # - # # @param id [String] - # # @param type [Symbol, :item_reference] - # # - # def initialize(id:, type: :item_reference, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, type: :item_reference) + # An internal identifier for an item to reference. + # + # @param id [String] + # @param type [Symbol, :item_reference] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::EasyInputMessage, OpenAI::Models::Responses::ResponseInputItem::Message, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput, OpenAI::Models::Responses::ResponseReasoningItem, OpenAI::Models::Responses::ResponseInputItem::ItemReference)] end end end diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index b270e74d..2c3f05d2 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -45,16 +45,12 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] # attr_writer :type - # @!parse - # # @param id [String] - # # @param content [Array] - # # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] - # # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] - # # - # def initialize(id:, content:, role:, status: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, content:, role:, status: nil, type: nil) + # @param id [String] + # @param content [Array] + # @param role [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] + # @param status [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] + # @param type [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] # The role 
of the message input. One of `user`, `system`, or `developer`. # @@ -66,11 +62,8 @@ module Role SYSTEM = :system DEVELOPER = :developer - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The status of item. One of `in_progress`, `completed`, or `incomplete`. @@ -84,11 +77,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # The type of the message input. Always set to `message`. @@ -99,11 +89,8 @@ module Type MESSAGE = :message - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_input_text.rb b/lib/openai/models/responses/response_input_text.rb index d8ed6f2c..ad65b3f2 100644 --- a/lib/openai/models/responses/response_input_text.rb +++ b/lib/openai/models/responses/response_input_text.rb @@ -16,15 +16,11 @@ class ResponseInputText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :input_text] required :type, const: :input_text - # @!parse - # # A text input to the model. - # # - # # @param text [String] - # # @param type [Symbol, :input_text] - # # - # def initialize(text:, type: :input_text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :input_text) + # A text input to the model. + # + # @param text [String] + # @param type [Symbol, :input_text] end end end diff --git a/lib/openai/models/responses/response_item.rb b/lib/openai/models/responses/response_item.rb index 3fe0074e..42dcb8be 100644 --- a/lib/openai/models/responses/response_item.rb +++ b/lib/openai/models/responses/response_item.rb @@ -34,9 +34,8 @@ module ResponseItem variant :function_call_output, -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseInputMessageItem, OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseFunctionToolCallItem, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem)] end end end diff --git a/lib/openai/models/responses/response_item_list.rb b/lib/openai/models/responses/response_item_list.rb index d5f234c7..2b6c1e85 100644 --- a/lib/openai/models/responses/response_item_list.rb +++ b/lib/openai/models/responses/response_item_list.rb @@ -34,18 +34,14 @@ class ResponseItemList < OpenAI::Internal::Type::BaseModel # @return [Symbol, :list] required :object, const: :list - # @!parse - # # A list of Response items. 
- # # - # # @param data [Array] - # # @param first_id [String] - # # @param has_more [Boolean] - # # @param last_id [String] - # # @param object [Symbol, :list] - # # - # def initialize(data:, first_id:, has_more:, last_id:, object: :list, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, first_id:, has_more:, last_id:, object: :list) + # A list of Response items. + # + # @param data [Array] + # @param first_id [String] + # @param has_more [Boolean] + # @param last_id [String] + # @param object [Symbol, :list] end end diff --git a/lib/openai/models/responses/response_output_audio.rb b/lib/openai/models/responses/response_output_audio.rb index d8fb8c61..55675501 100644 --- a/lib/openai/models/responses/response_output_audio.rb +++ b/lib/openai/models/responses/response_output_audio.rb @@ -22,16 +22,12 @@ class ResponseOutputAudio < OpenAI::Internal::Type::BaseModel # @return [Symbol, :output_audio] required :type, const: :output_audio - # @!parse - # # An audio output from the model. - # # - # # @param data [String] - # # @param transcript [String] - # # @param type [Symbol, :output_audio] - # # - # def initialize(data:, transcript:, type: :output_audio, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(data:, transcript:, type: :output_audio) + # An audio output from the model. + # + # @param data [String] + # @param transcript [String] + # @param type [Symbol, :output_audio] end end end diff --git a/lib/openai/models/responses/response_output_item.rb b/lib/openai/models/responses/response_output_item.rb index 5751bb93..83a8c4db 100644 --- a/lib/openai/models/responses/response_output_item.rb +++ b/lib/openai/models/responses/response_output_item.rb @@ -32,9 +32,8 @@ module ResponseOutputItem # a response. variant :reasoning, -> { OpenAI::Models::Responses::ResponseReasoningItem } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem)] end end end diff --git a/lib/openai/models/responses/response_output_item_added_event.rb b/lib/openai/models/responses/response_output_item_added_event.rb index 6dfbd4d9..2893bec8 100644 --- a/lib/openai/models/responses/response_output_item_added_event.rb +++ b/lib/openai/models/responses/response_output_item_added_event.rb @@ -22,16 +22,12 @@ class ResponseOutputItemAddedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_item.added"] required :type, const: :"response.output_item.added" - # @!parse - # # Emitted when a new output item is added. 
- # # - # # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_item.added"] - # # - # def initialize(item:, output_index:, type: :"response.output_item.added", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, output_index:, type: :"response.output_item.added") + # Emitted when a new output item is added. + # + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_item.added"] end end end diff --git a/lib/openai/models/responses/response_output_item_done_event.rb b/lib/openai/models/responses/response_output_item_done_event.rb index 904c8eae..a8ff9471 100644 --- a/lib/openai/models/responses/response_output_item_done_event.rb +++ b/lib/openai/models/responses/response_output_item_done_event.rb @@ -22,16 +22,12 @@ class ResponseOutputItemDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_item.done"] required :type, const: :"response.output_item.done" - # @!parse - # # Emitted when an output item is marked done. - # # - # # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_item.done"] - # # - # def initialize(item:, output_index:, type: :"response.output_item.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item:, output_index:, type: :"response.output_item.done") + # Emitted when an output item is marked done. + # + # @param item [OpenAI::Models::Responses::ResponseOutputMessage, OpenAI::Models::Responses::ResponseFileSearchToolCall, OpenAI::Models::Responses::ResponseFunctionToolCall, OpenAI::Models::Responses::ResponseFunctionWebSearch, OpenAI::Models::Responses::ResponseComputerToolCall, OpenAI::Models::Responses::ResponseReasoningItem] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_item.done"] end end end diff --git a/lib/openai/models/responses/response_output_message.rb b/lib/openai/models/responses/response_output_message.rb index b46f43d3..107a4798 100644 --- a/lib/openai/models/responses/response_output_message.rb +++ b/lib/openai/models/responses/response_output_message.rb @@ -36,18 +36,14 @@ class ResponseOutputMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :message] required :type, const: :message - # @!parse - # # An output message from the model. 
- # # - # # @param id [String] - # # @param content [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] - # # @param role [Symbol, :assistant] - # # @param type [Symbol, :message] - # # - # def initialize(id:, content:, status:, role: :assistant, type: :message, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, content:, status:, role: :assistant, type: :message) + # An output message from the model. + # + # @param id [String] + # @param content [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseOutputMessage::Status] + # @param role [Symbol, :assistant] + # @param type [Symbol, :message] # A text output from the model. module Content @@ -61,9 +57,8 @@ module Content # A refusal from the model. variant :refusal, -> { OpenAI::Models::Responses::ResponseOutputRefusal } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText, OpenAI::Models::Responses::ResponseOutputRefusal)] end # The status of the message input. One of `in_progress`, `completed`, or @@ -77,11 +72,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_output_refusal.rb b/lib/openai/models/responses/response_output_refusal.rb index 0e050ce2..0c2ccce2 100644 --- a/lib/openai/models/responses/response_output_refusal.rb +++ b/lib/openai/models/responses/response_output_refusal.rb @@ -16,15 +16,11 @@ class ResponseOutputRefusal < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!parse - # # A refusal from the model. - # # - # # @param refusal [String] - # # @param type [Symbol, :refusal] - # # - # def initialize(refusal:, type: :refusal, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(refusal:, type: :refusal) + # A refusal from the model. + # + # @param refusal [String] + # @param type [Symbol, :refusal] end end end diff --git a/lib/openai/models/responses/response_output_text.rb b/lib/openai/models/responses/response_output_text.rb index 9766a767..a857410f 100644 --- a/lib/openai/models/responses/response_output_text.rb +++ b/lib/openai/models/responses/response_output_text.rb @@ -23,16 +23,12 @@ class ResponseOutputText < OpenAI::Internal::Type::BaseModel # @return [Symbol, :output_text] required :type, const: :output_text - # @!parse - # # A text output from the model. - # # - # # @param annotations [Array] - # # @param text [String] - # # @param type [Symbol, :output_text] - # # - # def initialize(annotations:, text:, type: :output_text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotations:, text:, type: :output_text) + # A text output from the model. + # + # @param annotations [Array] + # @param text [String] + # @param type [Symbol, :output_text] # A citation to a file. module Annotation @@ -68,16 +64,12 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!parse - # # A citation to a file. 
- # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_citation] - # # - # def initialize(file_id:, index:, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_citation) + # A citation to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_citation] end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -111,18 +103,14 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # A citation for a web resource used to generate a model response. - # # - # # @param end_index [Integer] - # # @param start_index [Integer] - # # @param title [String] - # # @param url [String] - # # @param type [Symbol, :url_citation] - # # - # def initialize(end_index:, start_index:, title:, url:, type: :url_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) + # A citation for a web resource used to generate a model response. + # + # @param end_index [Integer] + # @param start_index [Integer] + # @param title [String] + # @param url [String] + # @param type [Symbol, :url_citation] end class FilePath < OpenAI::Internal::Type::BaseModel @@ -144,21 +132,16 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!parse - # # A path to a file. - # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_path] - # # - # def initialize(file_id:, index:, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_path) + # A path to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_path] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseOutputText::Annotation::FileCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::URLCitation, OpenAI::Models::Responses::ResponseOutputText::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index 3ae31538..fb8d5db6 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -34,18 +34,14 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] # attr_writer :status - # @!parse - # # A description of the chain of thought used by a reasoning model while generating - # # a response. 
- # # - # # @param id [String] - # # @param summary [Array] - # # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] - # # @param type [Symbol, :reasoning] - # # - # def initialize(id:, summary:, status: nil, type: :reasoning, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, summary:, status: nil, type: :reasoning) + # A description of the chain of thought used by a reasoning model while generating + # a response. + # + # @param id [String] + # @param summary [Array] + # @param status [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] + # @param type [Symbol, :reasoning] class Summary < OpenAI::Internal::Type::BaseModel # @!attribute text @@ -60,13 +56,9 @@ class Summary < OpenAI::Internal::Type::BaseModel # @return [Symbol, :summary_text] required :type, const: :summary_text - # @!parse - # # @param text [String] - # # @param type [Symbol, :summary_text] - # # - # def initialize(text:, type: :summary_text, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type: :summary_text) + # @param text [String] + # @param type [Symbol, :summary_text] end # The status of the item. One of `in_progress`, `completed`, or `incomplete`. @@ -80,11 +72,8 @@ module Status COMPLETED = :completed INCOMPLETE = :incomplete - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_refusal_delta_event.rb b/lib/openai/models/responses/response_refusal_delta_event.rb index 35247a7d..ea97e622 100644 --- a/lib/openai/models/responses/response_refusal_delta_event.rb +++ b/lib/openai/models/responses/response_refusal_delta_event.rb @@ -34,18 +34,14 @@ class ResponseRefusalDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.refusal.delta"] required :type, const: :"response.refusal.delta" - # @!parse - # # Emitted when there is a partial refusal text. - # # - # # @param content_index [Integer] - # # @param delta [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.refusal.delta"] - # # - # def initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.refusal.delta") + # Emitted when there is a partial refusal text. + # + # @param content_index [Integer] + # @param delta [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.refusal.delta"] end end end diff --git a/lib/openai/models/responses/response_refusal_done_event.rb b/lib/openai/models/responses/response_refusal_done_event.rb index 7f6cd16b..ee7b4b55 100644 --- a/lib/openai/models/responses/response_refusal_done_event.rb +++ b/lib/openai/models/responses/response_refusal_done_event.rb @@ -34,18 +34,14 @@ class ResponseRefusalDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.refusal.done"] required :type, const: :"response.refusal.done" - # @!parse - # # Emitted when refusal text is finalized. 
- # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param refusal [String] - # # @param type [Symbol, :"response.refusal.done"] - # # - # def initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, refusal:, type: :"response.refusal.done") + # Emitted when refusal text is finalized. + # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param refusal [String] + # @param type [Symbol, :"response.refusal.done"] end end end diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 1253ccfa..1b64f738 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -21,13 +21,9 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel # # @return [Array] # attr_writer :include - # @!parse - # # @param include [Array] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(include: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(include: nil, request_options: {}) + # @param include [Array] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/responses/response_status.rb b/lib/openai/models/responses/response_status.rb index da96c2e8..eb628952 100644 --- a/lib/openai/models/responses/response_status.rb +++ b/lib/openai/models/responses/response_status.rb @@ -13,11 +13,8 @@ module ResponseStatus IN_PROGRESS = :in_progress INCOMPLETE = :incomplete - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/response_stream_event.rb b/lib/openai/models/responses/response_stream_event.rb index db86c410..d6c7fee7 100644 --- a/lib/openai/models/responses/response_stream_event.rb +++ b/lib/openai/models/responses/response_stream_event.rb @@ -121,9 +121,8 @@ module ResponseStreamEvent variant :"response.web_search_call.searching", -> { OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent } - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseAudioDeltaEvent, OpenAI::Models::Responses::ResponseAudioDoneEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDeltaEvent, OpenAI::Models::Responses::ResponseAudioTranscriptDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDeltaEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCodeDoneEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallCompletedEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInProgressEvent, OpenAI::Models::Responses::ResponseCodeInterpreterCallInterpretingEvent, OpenAI::Models::Responses::ResponseCompletedEvent, OpenAI::Models::Responses::ResponseContentPartAddedEvent, OpenAI::Models::Responses::ResponseContentPartDoneEvent, OpenAI::Models::Responses::ResponseCreatedEvent, OpenAI::Models::Responses::ResponseErrorEvent, OpenAI::Models::Responses::ResponseFileSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseFileSearchCallInProgressEvent, 
OpenAI::Models::Responses::ResponseFileSearchCallSearchingEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDeltaEvent, OpenAI::Models::Responses::ResponseFunctionCallArgumentsDoneEvent, OpenAI::Models::Responses::ResponseInProgressEvent, OpenAI::Models::Responses::ResponseFailedEvent, OpenAI::Models::Responses::ResponseIncompleteEvent, OpenAI::Models::Responses::ResponseOutputItemAddedEvent, OpenAI::Models::Responses::ResponseOutputItemDoneEvent, OpenAI::Models::Responses::ResponseRefusalDeltaEvent, OpenAI::Models::Responses::ResponseRefusalDoneEvent, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent, OpenAI::Models::Responses::ResponseTextDeltaEvent, OpenAI::Models::Responses::ResponseTextDoneEvent, OpenAI::Models::Responses::ResponseWebSearchCallCompletedEvent, OpenAI::Models::Responses::ResponseWebSearchCallInProgressEvent, OpenAI::Models::Responses::ResponseWebSearchCallSearchingEvent)] end end end diff --git a/lib/openai/models/responses/response_text_annotation_delta_event.rb b/lib/openai/models/responses/response_text_annotation_delta_event.rb index bc38658a..28c8abee 100644 --- a/lib/openai/models/responses/response_text_annotation_delta_event.rb +++ b/lib/openai/models/responses/response_text_annotation_delta_event.rb @@ -41,29 +41,15 @@ class ResponseTextAnnotationDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.annotation.added"] required :type, const: :"response.output_text.annotation.added" - # @!parse - # # Emitted when a text annotation is added. - # # - # # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] - # # @param annotation_index [Integer] - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_text.annotation.added"] - # # - # def initialize( - # annotation:, - # annotation_index:, - # content_index:, - # item_id:, - # output_index:, - # type: :"response.output_text.annotation.added", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(annotation:, annotation_index:, content_index:, item_id:, output_index:, type: :"response.output_text.annotation.added") + # Emitted when a text annotation is added. + # + # @param annotation [OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath] + # @param annotation_index [Integer] + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_text.annotation.added"] # A citation to a file. # @@ -104,16 +90,12 @@ class FileCitation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!parse - # # A citation to a file. 
- # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_citation] - # # - # def initialize(file_id:, index:, type: :file_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_citation) + # A citation to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_citation] end class URLCitation < OpenAI::Internal::Type::BaseModel @@ -147,18 +129,14 @@ class URLCitation < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!parse - # # A citation for a web resource used to generate a model response. - # # - # # @param end_index [Integer] - # # @param start_index [Integer] - # # @param title [String] - # # @param url [String] - # # @param type [Symbol, :url_citation] - # # - # def initialize(end_index:, start_index:, title:, url:, type: :url_citation, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(end_index:, start_index:, title:, url:, type: :url_citation) + # A citation for a web resource used to generate a model response. + # + # @param end_index [Integer] + # @param start_index [Integer] + # @param title [String] + # @param url [String] + # @param type [Symbol, :url_citation] end class FilePath < OpenAI::Internal::Type::BaseModel @@ -180,21 +158,16 @@ class FilePath < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!parse - # # A path to a file. - # # - # # @param file_id [String] - # # @param index [Integer] - # # @param type [Symbol, :file_path] - # # - # def initialize(file_id:, index:, type: :file_path, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, index:, type: :file_path) + # A path to a file. + # + # @param file_id [String] + # @param index [Integer] + # @param type [Symbol, :file_path] end - # @!parse - # # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FileCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::URLCitation, OpenAI::Models::Responses::ResponseTextAnnotationDeltaEvent::Annotation::FilePath)] end end end diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index 7901b8fb..cd0bf58f 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -26,18 +26,14 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] # attr_writer :format_ - # @!parse - # # Configuration options for a text response from the model. Can be plain text or - # # structured JSON data. 
Learn more: - # # - # # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) - # # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) - # # - # # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] - # # - # def initialize(format_: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(format_: nil) + # Configuration options for a text response from the model. Can be plain text or + # structured JSON data. Learn more: + # + # - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + # - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + # + # @param format_ [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] end end end diff --git a/lib/openai/models/responses/response_text_delta_event.rb b/lib/openai/models/responses/response_text_delta_event.rb index 8b6c4b75..9f7744b8 100644 --- a/lib/openai/models/responses/response_text_delta_event.rb +++ b/lib/openai/models/responses/response_text_delta_event.rb @@ -34,18 +34,14 @@ class ResponseTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.delta"] required :type, const: :"response.output_text.delta" - # @!parse - # # Emitted when there is an additional text delta. - # # - # # @param content_index [Integer] - # # @param delta [String] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.output_text.delta"] - # # - # def initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, delta:, item_id:, output_index:, type: :"response.output_text.delta") + # Emitted when there is an additional text delta. + # + # @param content_index [Integer] + # @param delta [String] + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.output_text.delta"] end end end diff --git a/lib/openai/models/responses/response_text_done_event.rb b/lib/openai/models/responses/response_text_done_event.rb index 45a3267a..cea42efc 100644 --- a/lib/openai/models/responses/response_text_done_event.rb +++ b/lib/openai/models/responses/response_text_done_event.rb @@ -34,18 +34,14 @@ class ResponseTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.output_text.done"] required :type, const: :"response.output_text.done" - # @!parse - # # Emitted when text content is finalized. - # # - # # @param content_index [Integer] - # # @param item_id [String] - # # @param output_index [Integer] - # # @param text [String] - # # @param type [Symbol, :"response.output_text.done"] - # # - # def initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(content_index:, item_id:, output_index:, text:, type: :"response.output_text.done") + # Emitted when text content is finalized. 
+ # + # @param content_index [Integer] + # @param item_id [String] + # @param output_index [Integer] + # @param text [String] + # @param type [Symbol, :"response.output_text.done"] end end end diff --git a/lib/openai/models/responses/response_usage.rb b/lib/openai/models/responses/response_usage.rb index 2a2ecd8f..f8a7799f 100644 --- a/lib/openai/models/responses/response_usage.rb +++ b/lib/openai/models/responses/response_usage.rb @@ -34,19 +34,15 @@ class ResponseUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!parse - # # Represents token usage details including input tokens, output tokens, a - # # breakdown of output tokens, and the total tokens used. - # # - # # @param input_tokens [Integer] - # # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] - # # @param output_tokens [Integer] - # # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] - # # @param total_tokens [Integer] - # # - # def initialize(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(input_tokens:, input_tokens_details:, output_tokens:, output_tokens_details:, total_tokens:) + # Represents token usage details including input tokens, output tokens, a + # breakdown of output tokens, and the total tokens used. + # + # @param input_tokens [Integer] + # @param input_tokens_details [OpenAI::Models::Responses::ResponseUsage::InputTokensDetails] + # @param output_tokens [Integer] + # @param output_tokens_details [OpenAI::Models::Responses::ResponseUsage::OutputTokensDetails] + # @param total_tokens [Integer] # @see OpenAI::Models::Responses::ResponseUsage#input_tokens_details class InputTokensDetails < OpenAI::Internal::Type::BaseModel @@ -57,14 +53,10 @@ class InputTokensDetails < OpenAI::Internal::Type::BaseModel # @return [Integer] required :cached_tokens, Integer - # @!parse - # # A detailed breakdown of the input tokens. - # # - # # @param cached_tokens [Integer] - # # - # def initialize(cached_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cached_tokens:) + # A detailed breakdown of the input tokens. + # + # @param cached_tokens [Integer] end # @see OpenAI::Models::Responses::ResponseUsage#output_tokens_details @@ -75,14 +67,10 @@ class OutputTokensDetails < OpenAI::Internal::Type::BaseModel # @return [Integer] required :reasoning_tokens, Integer - # @!parse - # # A detailed breakdown of the output tokens. - # # - # # @param reasoning_tokens [Integer] - # # - # def initialize(reasoning_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(reasoning_tokens:) + # A detailed breakdown of the output tokens. 
+ # + # @param reasoning_tokens [Integer] end end end diff --git a/lib/openai/models/responses/response_web_search_call_completed_event.rb b/lib/openai/models/responses/response_web_search_call_completed_event.rb index 70e0dc19..59cdab36 100644 --- a/lib/openai/models/responses/response_web_search_call_completed_event.rb +++ b/lib/openai/models/responses/response_web_search_call_completed_event.rb @@ -22,16 +22,12 @@ class ResponseWebSearchCallCompletedEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.web_search_call.completed"] required :type, const: :"response.web_search_call.completed" - # @!parse - # # Emitted when a web search call is completed. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.web_search_call.completed"] - # # - # def initialize(item_id:, output_index:, type: :"response.web_search_call.completed", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.completed") + # Emitted when a web search call is completed. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.web_search_call.completed"] end end end diff --git a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb index fc8f006a..6820c819 100644 --- a/lib/openai/models/responses/response_web_search_call_in_progress_event.rb +++ b/lib/openai/models/responses/response_web_search_call_in_progress_event.rb @@ -22,16 +22,12 @@ class ResponseWebSearchCallInProgressEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.web_search_call.in_progress"] required :type, const: :"response.web_search_call.in_progress" - # @!parse - # # Emitted when a web search call is initiated. - # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.web_search_call.in_progress"] - # # - # def initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.in_progress") + # Emitted when a web search call is initiated. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.web_search_call.in_progress"] end end end diff --git a/lib/openai/models/responses/response_web_search_call_searching_event.rb b/lib/openai/models/responses/response_web_search_call_searching_event.rb index 39d6ae9f..efa04758 100644 --- a/lib/openai/models/responses/response_web_search_call_searching_event.rb +++ b/lib/openai/models/responses/response_web_search_call_searching_event.rb @@ -22,16 +22,12 @@ class ResponseWebSearchCallSearchingEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"response.web_search_call.searching"] required :type, const: :"response.web_search_call.searching" - # @!parse - # # Emitted when a web search call is executing. 
- # # - # # @param item_id [String] - # # @param output_index [Integer] - # # @param type [Symbol, :"response.web_search_call.searching"] - # # - # def initialize(item_id:, output_index:, type: :"response.web_search_call.searching", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(item_id:, output_index:, type: :"response.web_search_call.searching") + # Emitted when a web search call is executing. + # + # @param item_id [String] + # @param output_index [Integer] + # @param type [Symbol, :"response.web_search_call.searching"] end end end diff --git a/lib/openai/models/responses/tool.rb b/lib/openai/models/responses/tool.rb index e9be0652..5d053a5e 100644 --- a/lib/openai/models/responses/tool.rb +++ b/lib/openai/models/responses/tool.rb @@ -27,9 +27,8 @@ module Tool # Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search). variant -> { OpenAI::Models::Responses::WebSearchTool } - # @!parse - # # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::Responses::FileSearchTool, OpenAI::Models::Responses::FunctionTool, OpenAI::Models::Responses::ComputerTool, OpenAI::Models::Responses::WebSearchTool)] end end end diff --git a/lib/openai/models/responses/tool_choice_function.rb b/lib/openai/models/responses/tool_choice_function.rb index ca4d89b6..47c1d3ef 100644 --- a/lib/openai/models/responses/tool_choice_function.rb +++ b/lib/openai/models/responses/tool_choice_function.rb @@ -16,15 +16,11 @@ class ToolChoiceFunction < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!parse - # # Use this option to force the model to call a specific function. - # # - # # @param name [String] - # # @param type [Symbol, :function] - # # - # def initialize(name:, type: :function, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(name:, type: :function) + # Use this option to force the model to call a specific function. + # + # @param name [String] + # @param type [Symbol, :function] end end end diff --git a/lib/openai/models/responses/tool_choice_options.rb b/lib/openai/models/responses/tool_choice_options.rb index 789817e8..f43db682 100644 --- a/lib/openai/models/responses/tool_choice_options.rb +++ b/lib/openai/models/responses/tool_choice_options.rb @@ -18,11 +18,8 @@ module ToolChoiceOptions AUTO = :auto REQUIRED = :required - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/tool_choice_types.rb b/lib/openai/models/responses/tool_choice_types.rb index d26c027f..e51b376e 100644 --- a/lib/openai/models/responses/tool_choice_types.rb +++ b/lib/openai/models/responses/tool_choice_types.rb @@ -17,15 +17,11 @@ class ToolChoiceTypes < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] required :type, enum: -> { OpenAI::Models::Responses::ToolChoiceTypes::Type } - # @!parse - # # Indicates that the model should use a built-in tool to generate a response. - # # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). 
- # # - # # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] - # # - # def initialize(type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:) + # Indicates that the model should use a built-in tool to generate a response. + # [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). + # + # @param type [Symbol, OpenAI::Models::Responses::ToolChoiceTypes::Type] # The type of hosted tool the model should to use. Learn more about # [built-in tools](https://platform.openai.com/docs/guides/tools). @@ -45,11 +41,8 @@ module Type COMPUTER_USE_PREVIEW = :computer_use_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 90f84d39..3ed57a22 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -29,18 +29,14 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] optional :user_location, -> { OpenAI::Models::Responses::WebSearchTool::UserLocation }, nil?: true - # @!parse - # # This tool searches the web for relevant results to use in a response. Learn more - # # about the - # # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). - # # - # # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] - # # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] - # # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] - # # - # def initialize(type:, search_context_size: nil, user_location: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(type:, search_context_size: nil, user_location: nil) + # This tool searches the web for relevant results to use in a response. Learn more + # about the + # [web search tool](https://platform.openai.com/docs/guides/tools-web-search). + # + # @param type [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] + # @param search_context_size [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] + # @param user_location [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] # The type of the web search tool. One of: # @@ -54,11 +50,8 @@ module Type WEB_SEARCH_PREVIEW = :web_search_preview WEB_SEARCH_PREVIEW_2025_03_11 = :web_search_preview_2025_03_11 - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # High level guidance for the amount of context window space to use for the @@ -72,11 +65,8 @@ module SearchContextSize MEDIUM = :medium HIGH = :high - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::Responses::WebSearchTool#user_location @@ -129,16 +119,12 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :timezone - # @!parse - # # @param city [String] - # # @param country [String] - # # @param region [String] - # # @param timezone [String] - # # @param type [Symbol, :approximate] - # # - # def initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) + # @param city [String] + # @param country [String] + # @param region [String] + # @param timezone [String] + # @param type [Symbol, :approximate] end end end diff --git a/lib/openai/models/responses_model.rb b/lib/openai/models/responses_model.rb index 588d1722..5984103a 100644 --- a/lib/openai/models/responses_model.rb +++ b/lib/openai/models/responses_model.rb @@ -19,16 +19,12 @@ module ResponsesOnlyModel COMPUTER_USE_PREVIEW = :"computer-use-preview" COMPUTER_USE_PREVIEW_2025_03_11 = :"computer-use-preview-2025-03-11" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end - # @!parse - # # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Symbol, OpenAI::Models::ChatModel, Symbol, OpenAI::Models::ResponsesModel::ResponsesOnlyModel)] end end end diff --git a/lib/openai/models/static_file_chunking_strategy.rb b/lib/openai/models/static_file_chunking_strategy.rb index 9bff61c3..c8dc5106 100644 --- a/lib/openai/models/static_file_chunking_strategy.rb +++ b/lib/openai/models/static_file_chunking_strategy.rb @@ -18,13 +18,9 @@ class StaticFileChunkingStrategy < OpenAI::Internal::Type::BaseModel # @return [Integer] required :max_chunk_size_tokens, Integer - # @!parse - # # @param chunk_overlap_tokens [Integer] - # # @param max_chunk_size_tokens [Integer] - # # - # def initialize(chunk_overlap_tokens:, max_chunk_size_tokens:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunk_overlap_tokens:, max_chunk_size_tokens:) + # @param chunk_overlap_tokens [Integer] + # @param max_chunk_size_tokens [Integer] end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object.rb b/lib/openai/models/static_file_chunking_strategy_object.rb index 1655679f..7da9cb50 100644 --- a/lib/openai/models/static_file_chunking_strategy_object.rb +++ b/lib/openai/models/static_file_chunking_strategy_object.rb @@ -14,13 +14,9 @@ class StaticFileChunkingStrategyObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # @param static [OpenAI::Models::StaticFileChunkingStrategy] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # @param static [OpenAI::Models::StaticFileChunkingStrategy] + # @param type [Symbol, :static] end end end diff --git a/lib/openai/models/static_file_chunking_strategy_object_param.rb b/lib/openai/models/static_file_chunking_strategy_object_param.rb 
index f64fff68..b0d5a5b6 100644 --- a/lib/openai/models/static_file_chunking_strategy_object_param.rb +++ b/lib/openai/models/static_file_chunking_strategy_object_param.rb @@ -14,15 +14,11 @@ class StaticFileChunkingStrategyObjectParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :static] required :type, const: :static - # @!parse - # # Customize your own chunking strategy by setting chunk size and chunk overlap. - # # - # # @param static [OpenAI::Models::StaticFileChunkingStrategy] - # # @param type [Symbol, :static] - # # - # def initialize(static:, type: :static, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(static:, type: :static) + # Customize your own chunking strategy by setting chunk size and chunk overlap. + # + # @param static [OpenAI::Models::StaticFileChunkingStrategy] + # @param type [Symbol, :static] end end end diff --git a/lib/openai/models/upload.rb b/lib/openai/models/upload.rb index 9151584f..53ae0a23 100644 --- a/lib/openai/models/upload.rb +++ b/lib/openai/models/upload.rb @@ -60,22 +60,18 @@ class Upload < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::FileObject, nil] optional :file, -> { OpenAI::Models::FileObject }, nil?: true - # @!parse - # # The Upload object can accept byte chunks in the form of Parts. - # # - # # @param id [String] - # # @param bytes [Integer] - # # @param created_at [Integer] - # # @param expires_at [Integer] - # # @param filename [String] - # # @param purpose [String] - # # @param status [Symbol, OpenAI::Models::Upload::Status] - # # @param file [OpenAI::Models::FileObject, nil] - # # @param object [Symbol, :upload] - # # - # def initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, bytes:, created_at:, expires_at:, filename:, purpose:, status:, file: nil, object: :upload) + # The Upload object can accept byte chunks in the form of Parts. + # + # @param id [String] + # @param bytes [Integer] + # @param created_at [Integer] + # @param expires_at [Integer] + # @param filename [String] + # @param purpose [String] + # @param status [Symbol, OpenAI::Models::Upload::Status] + # @param file [OpenAI::Models::FileObject, nil] + # @param object [Symbol, :upload] # The status of the Upload. # @@ -88,11 +84,8 @@ module Status CANCELLED = :cancelled EXPIRED = :expired - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index 7c44f8c9..1b7164e8 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -8,12 +8,8 @@ class UploadCancelParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 3b8eb1b5..77f01df1 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -25,14 +25,10 @@ class UploadCompleteParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :md5 - # @!parse - # # @param part_ids [Array] - # # @param md5 [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(part_ids:, md5: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(part_ids:, md5: nil, request_options: {}) + # @param part_ids [Array] + # @param md5 [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index aa01ef38..0c7d54d5 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -38,16 +38,12 @@ class UploadCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FilePurpose] required :purpose, enum: -> { OpenAI::Models::FilePurpose } - # @!parse - # # @param bytes [Integer] - # # @param filename [String] - # # @param mime_type [String] - # # @param purpose [Symbol, OpenAI::Models::FilePurpose] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(bytes:, filename:, mime_type:, purpose:, request_options: {}) + # @param bytes [Integer] + # @param filename [String] + # @param mime_type [String] + # @param purpose [Symbol, OpenAI::Models::FilePurpose] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index e6fd5ad7..1e11840f 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -15,13 +15,9 @@ class PartCreateParams < OpenAI::Internal::Type::BaseModel # @return [Pathname, StringIO] required :data, OpenAI::Internal::Type::IOLike - # @!parse - # # @param data [Pathname, StringIO] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(data:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> 
void + # @!method initialize(data:, request_options: {}) + # @param data [Pathname, StringIO] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/uploads/upload_part.rb b/lib/openai/models/uploads/upload_part.rb index 4f839461..fa189119 100644 --- a/lib/openai/models/uploads/upload_part.rb +++ b/lib/openai/models/uploads/upload_part.rb @@ -29,17 +29,13 @@ class UploadPart < OpenAI::Internal::Type::BaseModel # @return [String] required :upload_id, String - # @!parse - # # The upload Part represents a chunk of bytes we can add to an Upload object. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param upload_id [String] - # # @param object [Symbol, :"upload.part"] - # # - # def initialize(id:, created_at:, upload_id:, object: :"upload.part", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, upload_id:, object: :"upload.part") + # The upload Part represents a chunk of bytes we can add to an Upload object. + # + # @param id [String] + # @param created_at [Integer] + # @param upload_id [String] + # @param object [Symbol, :"upload.part"] end end diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 9abda127..5ad255fe 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -80,40 +80,21 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :expires_at, Integer, nil?: true - # @!parse - # # A vector store is a collection of processed files can be used by the - # # `file_search` tool. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param file_counts [OpenAI::Models::VectorStore::FileCounts] - # # @param last_active_at [Integer, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param status [Symbol, OpenAI::Models::VectorStore::Status] - # # @param usage_bytes [Integer] - # # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] - # # @param expires_at [Integer, nil] - # # @param object [Symbol, :vector_store] - # # - # def initialize( - # id:, - # created_at:, - # file_counts:, - # last_active_at:, - # metadata:, - # name:, - # status:, - # usage_bytes:, - # expires_after: nil, - # expires_at: nil, - # object: :vector_store, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, file_counts:, last_active_at:, metadata:, name:, status:, usage_bytes:, expires_after: nil, expires_at: nil, object: :vector_store) + # A vector store is a collection of processed files can be used by the + # `file_search` tool. 
+ # + # @param id [String] + # @param created_at [Integer] + # @param file_counts [OpenAI::Models::VectorStore::FileCounts] + # @param last_active_at [Integer, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param status [Symbol, OpenAI::Models::VectorStore::Status] + # @param usage_bytes [Integer] + # @param expires_after [OpenAI::Models::VectorStore::ExpiresAfter] + # @param expires_at [Integer, nil] + # @param object [Symbol, :vector_store] # @see OpenAI::Models::VectorStore#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel @@ -147,16 +128,12 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # @param cancelled [Integer] - # # @param completed [Integer] - # # @param failed [Integer] - # # @param in_progress [Integer] - # # @param total [Integer] - # # - # def initialize(cancelled:, completed:, failed:, in_progress:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cancelled:, completed:, failed:, in_progress:, total:) + # @param cancelled [Integer] + # @param completed [Integer] + # @param failed [Integer] + # @param in_progress [Integer] + # @param total [Integer] end # The status of the vector store, which can be either `expired`, `in_progress`, or @@ -171,11 +148,8 @@ module Status IN_PROGRESS = :in_progress COMPLETED = :completed - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # @see OpenAI::Models::VectorStore#expires_after @@ -193,15 +167,11 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @return [Integer] required :days, Integer - # @!parse - # # The expiration policy for a vector store. - # # - # # @param days [Integer] - # # @param anchor [Symbol, :last_active_at] - # # - # def initialize(days:, anchor: :last_active_at, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(days:, anchor: :last_active_at) + # The expiration policy for a vector store. 
+ # + # @param days [Integer] + # @param anchor [Symbol, :last_active_at] end end end diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 8380c18b..2899d54e 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -62,27 +62,13 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :name - # @!parse - # # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] - # # @param file_ids [Array] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # chunking_strategy: nil, - # expires_after: nil, - # file_ids: nil, - # metadata: nil, - # name: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] + # @param file_ids [Array] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor @@ -98,15 +84,11 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @return [Integer] required :days, Integer - # @!parse - # # The expiration policy for a vector store. - # # - # # @param days [Integer] - # # @param anchor [Symbol, :last_active_at] - # # - # def initialize(days:, anchor: :last_active_at, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(days:, anchor: :last_active_at) + # The expiration policy for a vector store. 
+ # + # @param days [Integer] + # @param anchor [Symbol, :last_active_at] end end end diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index 5beedec7..e307e25d 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -8,12 +8,8 @@ class VectorStoreDeleteParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_store_deleted.rb b/lib/openai/models/vector_store_deleted.rb index b1624e84..ecc812bc 100644 --- a/lib/openai/models/vector_store_deleted.rb +++ b/lib/openai/models/vector_store_deleted.rb @@ -19,14 +19,10 @@ class VectorStoreDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"vector_store.deleted"] required :object, const: :"vector_store.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"vector_store.deleted"] - # # - # def initialize(id:, deleted:, object: :"vector_store.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"vector_store.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"vector_store.deleted"] end end end diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 345b2830..610dd889 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -56,16 +56,12 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStoreListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. @@ -75,11 +71,8 @@ module Order ASC = :asc DESC = :desc - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index f3b3b098..aaf50986 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -8,12 +8,8 @@ class VectorStoreRetrieveParams < OpenAI::Internal::Type::BaseModel # extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!parse - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(request_options: {}) + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 653db38a..9807ed6a 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -55,27 +55,13 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # # @return [Boolean] # attr_writer :rewrite_query - # @!parse - # # @param query [String, Array] - # # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # # @param max_num_results [Integer] - # # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] - # # @param rewrite_query [Boolean] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize( - # query:, - # filters: nil, - # max_num_results: nil, - # ranking_options: nil, - # rewrite_query: nil, - # request_options: {}, - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) + # @param query [String, Array] + # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] + # @param max_num_results [Integer] + # @param ranking_options [OpenAI::Models::VectorStoreSearchParams::RankingOptions] + # @param rewrite_query [Boolean] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # A query string for a search module Query @@ -85,9 +71,8 @@ module Query variant -> { OpenAI::Models::VectorStoreSearchParams::Query::StringArray } - # @!parse - # # @return [Array(String, Array)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Array)] StringArray = OpenAI::Internal::Type::ArrayOf[String] end @@ -102,9 +87,8 @@ module Filters # Combine multiple filters using `and` or `or`. variant -> { OpenAI::Models::CompoundFilter } - # @!parse - # # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] - # def self.variants; end + # @!method self.variants + # @return [Array(OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter)] end class RankingOptions < OpenAI::Internal::Type::BaseModel @@ -126,15 +110,11 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # # @return [Float] # attr_writer :score_threshold - # @!parse - # # Ranking options for search. 
- # # - # # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] - # # @param score_threshold [Float] - # # - # def initialize(ranker: nil, score_threshold: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(ranker: nil, score_threshold: nil) + # Ranking options for search. + # + # @param ranker [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] + # @param score_threshold [Float] # @see OpenAI::Models::VectorStoreSearchParams::RankingOptions#ranker module Ranker @@ -143,11 +123,8 @@ module Ranker AUTO = :auto DEFAULT_2024_11_15 = :"default-2024-11-15" - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_store_search_response.rb b/lib/openai/models/vector_store_search_response.rb index 6b076e29..d06a0c95 100644 --- a/lib/openai/models/vector_store_search_response.rb +++ b/lib/openai/models/vector_store_search_response.rb @@ -41,16 +41,12 @@ class VectorStoreSearchResponse < OpenAI::Internal::Type::BaseModel # @return [Float] required :score, Float - # @!parse - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param content [Array] - # # @param file_id [String] - # # @param filename [String] - # # @param score [Float] - # # - # def initialize(attributes:, content:, file_id:, filename:, score:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(attributes:, content:, file_id:, filename:, score:) + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param content [Array] + # @param file_id [String] + # @param filename [String] + # @param score [Float] module Attribute extend OpenAI::Internal::Type::Union @@ -61,9 +57,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end class Content < OpenAI::Internal::Type::BaseModel @@ -79,13 +74,9 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] required :type, enum: -> { OpenAI::Models::VectorStoreSearchResponse::Content::Type } - # @!parse - # # @param text [String] - # # @param type [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] - # # - # def initialize(text:, type:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text:, type:) + # @param text [String] + # @param type [Symbol, OpenAI::Models::VectorStoreSearchResponse::Content::Type] # The type of content. # @@ -95,11 +86,8 @@ module Type TEXT = :text - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 145e2808..9bda9d94 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -31,15 +31,11 @@ class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :name, String, nil?: true - # @!parse - # # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] - # # @param metadata [Hash{Symbol=>String}, nil] - # # @param name [String, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(expires_after: nil, metadata: nil, name: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(expires_after: nil, metadata: nil, name: nil, request_options: {}) + # @param expires_after [OpenAI::Models::VectorStoreUpdateParams::ExpiresAfter, nil] + # @param metadata [Hash{Symbol=>String}, nil] + # @param name [String, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @!attribute anchor @@ -55,15 +51,11 @@ class ExpiresAfter < OpenAI::Internal::Type::BaseModel # @return [Integer] required :days, Integer - # @!parse - # # The expiration policy for a vector store. - # # - # # @param days [Integer] - # # @param anchor [Symbol, :last_active_at] - # # - # def initialize(days:, anchor: :last_active_at, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(days:, anchor: :last_active_at) + # The expiration policy for a vector store. 
+ # + # @param days [Integer] + # @param anchor [Symbol, :last_active_at] end end end diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index a64a4b9c..c79d293e 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -14,13 +14,9 @@ class FileBatchCancelParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index 31d32b92..f0517a4c 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -40,15 +40,11 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] # attr_writer :chunking_strategy - # @!parse - # # @param file_ids [Array] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) + # @param file_ids [Array] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute extend OpenAI::Internal::Type::Union @@ -59,9 +55,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index 0f0bacb3..a99326c3 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -72,18 +72,14 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] # attr_writer :order - # @!parse - # # @param vector_store_id [String] - # # @param after [String] - # # @param before [String] - # # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] - # # @param 
request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) + # @param vector_store_id [String] + # @param after [String] + # @param before [String] + # @param filter [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter @@ -94,11 +90,8 @@ module Filter FAILED = :failed CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -109,11 +102,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index 55499481..48cf6115 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -14,13 +14,9 @@ class FileBatchRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index 26be8b94..e4f3deec 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -14,13 +14,9 @@ class FileContentParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_content_response.rb b/lib/openai/models/vector_stores/file_content_response.rb index 01c19380..b4924ae1 100644 --- a/lib/openai/models/vector_stores/file_content_response.rb +++ b/lib/openai/models/vector_stores/file_content_response.rb @@ -25,13 +25,9 @@ class FileContentResponse < OpenAI::Internal::Type::BaseModel # # @return [String] # attr_writer :type - # @!parse - # # @param text [String] - 
# # @param type [String] - # # - # def initialize(text: nil, type: nil, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(text: nil, type: nil) + # @param text [String] + # @param type [String] end end end diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index 6767c7dc..d8307a70 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -40,15 +40,11 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] # attr_writer :chunking_strategy - # @!parse - # # @param file_id [String] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) + # @param file_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute extend OpenAI::Internal::Type::Union @@ -59,9 +55,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index e4bee072..25a5fbcc 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -14,13 +14,9 @@ class FileDeleteParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 88d3a55c..0c80decc 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -67,17 +67,13 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] # attr_writer :order - # @!parse - # # @param after [String] - # # @param before [String] - # # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] - # # @param limit [Integer] - # # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] - # # @param 
request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) + # @param after [String] + # @param before [String] + # @param filter [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] + # @param limit [Integer] + # @param order [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. module Filter @@ -88,11 +84,8 @@ module Filter FAILED = :failed CANCELLED = :cancelled - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end # Sort order by the `created_at` timestamp of the objects. `asc` for ascending @@ -103,11 +96,8 @@ module Order ASC = :asc DESC = :desc - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index 9d9c26a7..2b63ee84 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -14,13 +14,9 @@ class FileRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # @param vector_store_id [String] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, request_options: {}) + # @param vector_store_id [String] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] end end end diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 2e5f4d52..9cf5a31a 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -26,14 +26,10 @@ class FileUpdateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileUpdateParams::Attribute] }, nil?: true - # @!parse - # # @param vector_store_id [String] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] - # # - # def initialize(vector_store_id:, attributes:, request_options: {}, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(vector_store_id:, attributes:, request_options: {}) + # @param vector_store_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] module Attribute extend OpenAI::Internal::Type::Union @@ -44,9 +40,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/vector_store_file.rb 
b/lib/openai/models/vector_stores/vector_store_file.rb index 71ba4e7d..ae51122a 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -76,35 +76,18 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel # # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] # attr_writer :chunking_strategy - # @!parse - # # A list of files attached to a vector store. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] - # # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] - # # @param usage_bytes [Integer] - # # @param vector_store_id [String] - # # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] - # # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] - # # @param object [Symbol, :"vector_store.file"] - # # - # def initialize( - # id:, - # created_at:, - # last_error:, - # status:, - # usage_bytes:, - # vector_store_id:, - # attributes: nil, - # chunking_strategy: nil, - # object: :"vector_store.file", - # ** - # ) - # super - # end - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file") + # A list of files attached to a vector store. + # + # @param id [String] + # @param created_at [Integer] + # @param last_error [OpenAI::Models::VectorStores::VectorStoreFile::LastError, nil] + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::Status] + # @param usage_bytes [Integer] + # @param vector_store_id [String] + # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] + # @param chunking_strategy [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] + # @param object [Symbol, :"vector_store.file"] # @see OpenAI::Models::VectorStores::VectorStoreFile#last_error class LastError < OpenAI::Internal::Type::BaseModel @@ -120,16 +103,12 @@ class LastError < OpenAI::Internal::Type::BaseModel # @return [String] required :message, String - # @!parse - # # The last error associated with this vector store file. Will be `null` if there - # # are no errors. - # # - # # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] - # # @param message [String] - # # - # def initialize(code:, message:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(code:, message:) + # The last error associated with this vector store file. Will be `null` if there + # are no errors. + # + # @param code [Symbol, OpenAI::Models::VectorStores::VectorStoreFile::LastError::Code] + # @param message [String] # One of `server_error` or `rate_limit_exceeded`. # @@ -141,11 +120,8 @@ module Code UNSUPPORTED_FILE = :unsupported_file INVALID_FILE = :invalid_file - finalize! - - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end @@ -162,11 +138,8 @@ module Status CANCELLED = :cancelled FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end module Attribute @@ -178,9 +151,8 @@ module Attribute variant OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Array(String, Float, Boolean)] - # def self.variants; end + # @!method self.variants + # @return [Array(String, Float, Boolean)] end end end diff --git a/lib/openai/models/vector_stores/vector_store_file_batch.rb b/lib/openai/models/vector_stores/vector_store_file_batch.rb index 55e8644b..ee7fb5ea 100644 --- a/lib/openai/models/vector_stores/vector_store_file_batch.rb +++ b/lib/openai/models/vector_stores/vector_store_file_batch.rb @@ -45,19 +45,15 @@ class VectorStoreFileBatch < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!parse - # # A batch of files attached to a vector store. - # # - # # @param id [String] - # # @param created_at [Integer] - # # @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] - # # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] - # # @param vector_store_id [String] - # # @param object [Symbol, :"vector_store.files_batch"] - # # - # def initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, created_at:, file_counts:, status:, vector_store_id:, object: :"vector_store.files_batch") + # A batch of files attached to a vector store. + # + # @param id [String] + # @param created_at [Integer] + # @param file_counts [OpenAI::Models::VectorStores::VectorStoreFileBatch::FileCounts] + # @param status [Symbol, OpenAI::Models::VectorStores::VectorStoreFileBatch::Status] + # @param vector_store_id [String] + # @param object [Symbol, :"vector_store.files_batch"] # @see OpenAI::Models::VectorStores::VectorStoreFileBatch#file_counts class FileCounts < OpenAI::Internal::Type::BaseModel @@ -91,16 +87,12 @@ class FileCounts < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total, Integer - # @!parse - # # @param cancelled [Integer] - # # @param completed [Integer] - # # @param failed [Integer] - # # @param in_progress [Integer] - # # @param total [Integer] - # # - # def initialize(cancelled:, completed:, failed:, in_progress:, total:, **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(cancelled:, completed:, failed:, in_progress:, total:) + # @param cancelled [Integer] + # @param completed [Integer] + # @param failed [Integer] + # @param in_progress [Integer] + # @param total [Integer] end # The status of the vector store files batch, which can be either `in_progress`, @@ -115,11 +107,8 @@ module Status CANCELLED = :cancelled FAILED = :failed - finalize! 
- - # @!parse - # # @return [Array] - # def self.values; end + # @!method self.values + # @return [Array] end end end diff --git a/lib/openai/models/vector_stores/vector_store_file_deleted.rb b/lib/openai/models/vector_stores/vector_store_file_deleted.rb index f7992170..971629db 100644 --- a/lib/openai/models/vector_stores/vector_store_file_deleted.rb +++ b/lib/openai/models/vector_stores/vector_store_file_deleted.rb @@ -20,14 +20,10 @@ class VectorStoreFileDeleted < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"vector_store.file.deleted"] required :object, const: :"vector_store.file.deleted" - # @!parse - # # @param id [String] - # # @param deleted [Boolean] - # # @param object [Symbol, :"vector_store.file.deleted"] - # # - # def initialize(id:, deleted:, object: :"vector_store.file.deleted", **) = super - - # def initialize: (Hash | OpenAI::Internal::Type::BaseModel) -> void + # @!method initialize(id:, deleted:, object: :"vector_store.file.deleted") + # @param id [String] + # @param deleted [Boolean] + # @param object [Symbol, :"vector_store.file.deleted"] end end diff --git a/lib/openai/request_options.rb b/lib/openai/request_options.rb index f4eaf933..4bd64d4b 100644 --- a/lib/openai/request_options.rb +++ b/lib/openai/request_options.rb @@ -66,10 +66,9 @@ def self.validate!(opts) optional :timeout, Float # @!parse - # # Returns a new instance of RequestOptions. + # # @!method initialize(values = {}) + # # Returns a new instance of RequestOptions. # # - # # @param values [Hash{Symbol=>Object}] - # # - # def initialize(values = {}) = super + # # @param values [Hash{Symbol=>Object}] end end diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index 32f6a62c..bc1959ae 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -176,6 +176,7 @@ module OpenAI def deconstruct_keys(keys); end class << self + # @api private sig { params(model: OpenAI::Internal::Type::BaseModel).returns(OpenAI::Internal::AnyHash) } def walk(model); end end diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi index a85d1768..e1d0753c 100644 --- a/rbi/lib/openai/internal/type/enum.rbi +++ b/rbi/lib/openai/internal/type/enum.rbi @@ -22,12 +22,6 @@ module OpenAI sig { overridable.returns(T::Array[T.any(NilClass, T::Boolean, Integer, Float, Symbol)]) } def values; end - # @api private - # - # Guard against thread safety issues by instantiating `@values`. 
- sig { void } - private def finalize!; end - sig { params(other: T.anything).returns(T::Boolean) } def ===(other); end diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 5f707303..4de50b6d 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -6,8 +6,6 @@ module OpenAI def self.values: -> ::Array[(nil | bool | Integer | Float | Symbol)] - private def self.finalize!: -> void - def ===: (top other) -> bool def ==: (top other) -> bool diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index c3b0dbfa..b03987cf 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -156,6 +156,7 @@ def test_dump_retry class OpenAI::Test::EnumModelTest < Minitest::Test class E0 include OpenAI::Internal::Type::Enum + attr_reader :values def initialize(*values) = (@values = values) end @@ -575,6 +576,7 @@ def test_coerce class OpenAI::Test::BaseModelQoLTest < Minitest::Test class E0 include OpenAI::Internal::Type::Enum + attr_reader :values def initialize(*values) = (@values = values) end From 16ec2e391b0cfdf49727099be9afa220c7ab16e5 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 11:43:01 +0000 Subject: [PATCH 10/15] chore: simplify yard annotations by removing most `@!parse` directives --- .../internal/type/request_parameters.rb | 3 +- .../models/audio/speech_create_params.rb | 21 +--- lib/openai/models/audio/transcription.rb | 24 +--- .../audio/transcription_create_params.rb | 39 ++---- .../audio/transcription_text_delta_event.rb | 24 +--- .../audio/transcription_text_done_event.rb | 24 +--- .../models/audio/transcription_verbose.rb | 12 +- .../models/audio/translation_create_params.rb | 21 +--- .../models/audio/translation_verbose.rb | 6 +- lib/openai/models/batch.rb | 84 +++---------- lib/openai/models/batch_cancel_params.rb | 3 +- lib/openai/models/batch_create_params.rb | 3 +- lib/openai/models/batch_error.rb | 12 +- lib/openai/models/batch_list_params.rb | 15 +-- lib/openai/models/batch_retrieve_params.rb | 3 +- lib/openai/models/beta/assistant.rb | 24 +--- .../models/beta/assistant_create_params.rb | 51 ++------ .../models/beta/assistant_delete_params.rb | 3 +- .../models/beta/assistant_list_params.rb | 27 +---- .../models/beta/assistant_retrieve_params.rb | 3 +- .../models/beta/assistant_stream_event.rb | 6 +- .../models/beta/assistant_tool_choice.rb | 6 +- .../models/beta/assistant_update_params.rb | 39 ++---- lib/openai/models/beta/file_search_tool.rb | 24 +--- lib/openai/models/beta/thread.rb | 24 +--- .../beta/thread_create_and_run_params.rb | 99 +++------------- .../models/beta/thread_create_params.rb | 63 ++-------- .../models/beta/thread_delete_params.rb | 3 +- .../models/beta/thread_retrieve_params.rb | 3 +- lib/openai/models/beta/thread_stream_event.rb | 6 +- .../models/beta/thread_update_params.rb | 27 +---- .../threads/file_citation_delta_annotation.rb | 36 +----- .../threads/file_path_delta_annotation.rb | 30 +---- lib/openai/models/beta/threads/image_file.rb | 6 +- .../models/beta/threads/image_file_delta.rb | 12 +- .../beta/threads/image_file_delta_block.rb | 6 +- lib/openai/models/beta/threads/image_url.rb | 6 +- .../models/beta/threads/image_url_delta.rb | 12 +- .../beta/threads/image_url_delta_block.rb | 6 +- lib/openai/models/beta/threads/message.rb | 12 +- .../beta/threads/message_create_params.rb | 15 +-- 
.../beta/threads/message_delete_params.rb | 3 +- .../models/beta/threads/message_delta.rb | 12 +- .../beta/threads/message_list_params.rb | 33 +----- .../beta/threads/message_retrieve_params.rb | 3 +- .../beta/threads/message_update_params.rb | 3 +- .../beta/threads/refusal_delta_block.rb | 6 +- lib/openai/models/beta/threads/run.rb | 6 +- .../models/beta/threads/run_cancel_params.rb | 3 +- .../models/beta/threads/run_create_params.rb | 27 +---- .../models/beta/threads/run_list_params.rb | 27 +---- .../beta/threads/run_retrieve_params.rb | 3 +- .../threads/run_submit_tool_outputs_params.rb | 15 +-- .../models/beta/threads/run_update_params.rb | 3 +- .../threads/runs/code_interpreter_logs.rb | 6 +- .../runs/code_interpreter_output_image.rb | 12 +- .../runs/code_interpreter_tool_call_delta.rb | 24 +--- .../threads/runs/file_search_tool_call.rb | 30 +---- .../runs/file_search_tool_call_delta.rb | 6 +- .../threads/runs/function_tool_call_delta.rb | 24 +--- .../beta/threads/runs/run_step_delta.rb | 6 +- .../runs/run_step_delta_message_delta.rb | 12 +- .../beta/threads/runs/step_list_params.rb | 33 +----- .../beta/threads/runs/step_retrieve_params.rb | 9 +- .../threads/runs/tool_call_delta_object.rb | 6 +- lib/openai/models/beta/threads/text_delta.rb | 12 +- .../models/beta/threads/text_delta_block.rb | 6 +- lib/openai/models/chat/chat_completion.rb | 12 +- ...chat_completion_assistant_message_param.rb | 12 +- .../models/chat/chat_completion_chunk.rb | 66 ++--------- .../chat/chat_completion_content_part.rb | 18 +-- .../chat_completion_content_part_image.rb | 6 +- ...chat_completion_developer_message_param.rb | 6 +- .../models/chat/chat_completion_message.rb | 18 +-- .../chat/chat_completion_stream_options.rb | 6 +- .../chat_completion_system_message_param.rb | 6 +- .../chat_completion_user_message_param.rb | 6 +- .../models/chat/completion_create_params.rb | 93 +++------------ .../models/chat/completion_delete_params.rb | 3 +- .../models/chat/completion_list_params.rb | 27 +---- .../models/chat/completion_retrieve_params.rb | 3 +- .../models/chat/completion_update_params.rb | 3 +- .../chat/completions/message_list_params.rb | 21 +--- lib/openai/models/completion.rb | 12 +- lib/openai/models/completion_choice.rb | 24 +--- lib/openai/models/completion_create_params.rb | 9 +- lib/openai/models/completion_usage.rb | 48 ++------ lib/openai/models/embedding_create_params.rb | 21 +--- lib/openai/models/eval_create_params.rb | 21 +--- lib/openai/models/eval_delete_params.rb | 3 +- lib/openai/models/eval_list_params.rb | 27 +---- lib/openai/models/eval_retrieve_params.rb | 3 +- .../models/eval_text_similarity_grader.rb | 6 +- lib/openai/models/eval_update_params.rb | 9 +- ...create_eval_completions_run_data_source.rb | 36 +----- .../create_eval_jsonl_run_data_source.rb | 6 +- lib/openai/models/evals/run_cancel_params.rb | 3 +- lib/openai/models/evals/run_create_params.rb | 9 +- lib/openai/models/evals/run_delete_params.rb | 3 +- .../models/evals/run_delete_response.rb | 18 +-- lib/openai/models/evals/run_list_params.rb | 27 +---- .../models/evals/run_retrieve_params.rb | 3 +- .../evals/runs/output_item_list_params.rb | 27 +---- .../evals/runs/output_item_list_response.rb | 12 +- .../evals/runs/output_item_retrieve_params.rb | 3 +- .../runs/output_item_retrieve_response.rb | 12 +- lib/openai/models/file_content_params.rb | 3 +- lib/openai/models/file_create_params.rb | 3 +- lib/openai/models/file_delete_params.rb | 3 +- lib/openai/models/file_list_params.rb | 27 +---- lib/openai/models/file_object.rb | 
12 +- lib/openai/models/file_retrieve_params.rb | 3 +- .../checkpoints/permission_create_params.rb | 3 +- .../checkpoints/permission_delete_params.rb | 3 +- .../checkpoints/permission_retrieve_params.rb | 27 +---- .../models/fine_tuning/fine_tuning_job.rb | 96 +++------------ .../fine_tuning/fine_tuning_job_event.rb | 12 +- .../fine_tuning_job_wandb_integration.rb | 6 +- .../models/fine_tuning/job_cancel_params.rb | 3 +- .../models/fine_tuning/job_create_params.rb | 111 +++--------------- .../fine_tuning/job_list_events_params.rb | 15 +-- .../models/fine_tuning/job_list_params.rb | 15 +-- .../models/fine_tuning/job_retrieve_params.rb | 3 +- .../jobs/checkpoint_list_params.rb | 15 +-- .../jobs/fine_tuning_job_checkpoint.rb | 42 ++----- lib/openai/models/function_definition.rb | 12 +- lib/openai/models/image.rb | 18 +-- .../models/image_create_variation_params.rb | 9 +- lib/openai/models/image_edit_params.rb | 15 +-- lib/openai/models/image_generate_params.rb | 15 +-- lib/openai/models/model_delete_params.rb | 3 +- lib/openai/models/model_list_params.rb | 3 +- lib/openai/models/model_retrieve_params.rb | 3 +- lib/openai/models/moderation_create_params.rb | 9 +- .../models/response_format_json_schema.rb | 12 +- .../models/responses/easy_input_message.rb | 6 +- .../models/responses/file_search_tool.rb | 30 +---- .../responses/input_item_list_params.rb | 33 +----- lib/openai/models/responses/response.rb | 30 +---- ...response_computer_tool_call_output_item.rb | 12 +- ...se_computer_tool_call_output_screenshot.rb | 12 +- .../responses/response_create_params.rb | 27 +---- .../responses/response_delete_params.rb | 3 +- .../response_file_search_tool_call.rb | 24 +--- ...response_format_text_json_schema_config.rb | 6 +- .../responses/response_function_tool_call.rb | 12 +- ...response_function_tool_call_output_item.rb | 6 +- .../models/responses/response_input_file.rb | 18 +-- .../models/responses/response_input_item.rb | 42 ++----- .../responses/response_input_message_item.rb | 12 +- .../responses/response_reasoning_item.rb | 6 +- .../responses/response_retrieve_params.rb | 9 +- .../models/responses/response_text_config.rb | 6 +- .../models/responses/web_search_tool.rb | 30 +---- lib/openai/models/upload_cancel_params.rb | 3 +- lib/openai/models/upload_complete_params.rb | 9 +- lib/openai/models/upload_create_params.rb | 3 +- .../models/uploads/part_create_params.rb | 3 +- lib/openai/models/vector_store.rb | 6 +- .../models/vector_store_create_params.rb | 27 +---- .../models/vector_store_delete_params.rb | 3 +- lib/openai/models/vector_store_list_params.rb | 27 +---- .../models/vector_store_retrieve_params.rb | 3 +- .../models/vector_store_search_params.rb | 39 ++---- .../models/vector_store_update_params.rb | 3 +- .../vector_stores/file_batch_cancel_params.rb | 3 +- .../vector_stores/file_batch_create_params.rb | 9 +- .../file_batch_list_files_params.rb | 33 +----- .../file_batch_retrieve_params.rb | 3 +- .../vector_stores/file_content_params.rb | 3 +- .../vector_stores/file_content_response.rb | 12 +- .../vector_stores/file_create_params.rb | 9 +- .../vector_stores/file_delete_params.rb | 3 +- .../models/vector_stores/file_list_params.rb | 33 +----- .../vector_stores/file_retrieve_params.rb | 3 +- .../vector_stores/file_update_params.rb | 3 +- .../models/vector_stores/vector_store_file.rb | 6 +- 177 files changed, 530 insertions(+), 2362 deletions(-) diff --git a/lib/openai/internal/type/request_parameters.rb b/lib/openai/internal/type/request_parameters.rb index aaa7a939..958e1051 100644 --- 
a/lib/openai/internal/type/request_parameters.rb +++ b/lib/openai/internal/type/request_parameters.rb @@ -12,9 +12,8 @@ module RequestParameters # @param mod [Module] def self.included(mod) - return unless mod <= OpenAI::Internal::Type::BaseModel + raise ArgumentError.new(mod) unless mod <= OpenAI::Internal::Type::BaseModel - mod.extend(OpenAI::Internal::Type::RequestParameters::Converter) mod.optional(:request_options, OpenAI::RequestOptions) end diff --git a/lib/openai/models/audio/speech_create_params.rb b/lib/openai/models/audio/speech_create_params.rb index 7c59f76f..114d8fa3 100644 --- a/lib/openai/models/audio/speech_create_params.rb +++ b/lib/openai/models/audio/speech_create_params.rb @@ -5,8 +5,7 @@ module Models module Audio # @see OpenAI::Resources::Audio::Speech#create class SpeechCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -31,39 +30,27 @@ class SpeechCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::Audio::SpeechCreateParams::Voice] required :voice, union: -> { OpenAI::Models::Audio::SpeechCreateParams::Voice } - # @!attribute [r] instructions + # @!attribute instructions # Control the voice of your generated audio with additional instructions. Does not # work with `tts-1` or `tts-1-hd`. # # @return [String, nil] optional :instructions, String - # @!parse - # # @return [String] - # attr_writer :instructions - - # @!attribute [r] response_format + # @!attribute response_format # The format to audio in. Supported formats are `mp3`, `opus`, `aac`, `flac`, # `wav`, and `pcm`. # # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::Audio::SpeechCreateParams::ResponseFormat] - # attr_writer :response_format - - # @!attribute [r] speed + # @!attribute speed # The speed of the generated audio. Select a value from `0.25` to `4.0`. `1.0` is # the default. # # @return [Float, nil] optional :speed, Float - # @!parse - # # @return [Float] - # attr_writer :speed - # @!method initialize(input:, model:, voice:, instructions: nil, response_format: nil, speed: nil, request_options: {}) # @param input [String] # @param model [String, Symbol, OpenAI::Models::Audio::SpeechModel] diff --git a/lib/openai/models/audio/transcription.rb b/lib/openai/models/audio/transcription.rb index 0e0cb142..96e65045 100644 --- a/lib/openai/models/audio/transcription.rb +++ b/lib/openai/models/audio/transcription.rb @@ -10,7 +10,7 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!attribute [r] logprobs + # @!attribute logprobs # The log probabilities of the tokens in the transcription. Only returned with the # models `gpt-4o-transcribe` and `gpt-4o-mini-transcribe` if `logprobs` is added # to the `include` array. @@ -18,10 +18,6 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::Transcription::Logprob] } - # @!parse - # # @return [Array] - # attr_writer :logprobs - # @!method initialize(text:, logprobs: nil) # Represents a transcription response returned by model, based on the provided # input. 
@@ -30,36 +26,24 @@ class Transcription < OpenAI::Internal::Type::BaseModel # @param logprobs [Array] class Logprob < OpenAI::Internal::Type::BaseModel - # @!attribute [r] token + # @!attribute token # The token in the transcription. # # @return [String, nil] optional :token, String - # @!parse - # # @return [String] - # attr_writer :token - - # @!attribute [r] bytes + # @!attribute bytes # The bytes of the token. # # @return [Array, nil] optional :bytes, OpenAI::Internal::Type::ArrayOf[Float] - # @!parse - # # @return [Array] - # attr_writer :bytes - - # @!attribute [r] logprob + # @!attribute logprob # The log probability of the token. # # @return [Float, nil] optional :logprob, Float - # @!parse - # # @return [Float] - # attr_writer :logprob - # @!method initialize(token: nil, bytes: nil, logprob: nil) # @param token [String] # @param bytes [Array] diff --git a/lib/openai/models/audio/transcription_create_params.rb b/lib/openai/models/audio/transcription_create_params.rb index 259a21cb..4377a2ab 100644 --- a/lib/openai/models/audio/transcription_create_params.rb +++ b/lib/openai/models/audio/transcription_create_params.rb @@ -7,8 +7,7 @@ module Audio # # @see OpenAI::Resources::Audio::Transcriptions#create_streaming class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file @@ -26,7 +25,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Models::Audio::TranscriptionCreateParams::Model } - # @!attribute [r] include + # @!attribute include # Additional information to include in the transcription response. `logprobs` will # return the log probabilities of the tokens in the response to understand the # model's confidence in the transcription. `logprobs` only works with @@ -37,11 +36,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - - # @!attribute [r] language + # @!attribute language # The language of the input audio. Supplying the input language in # [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) (e.g. `en`) # format will improve accuracy and latency. @@ -49,11 +44,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :language, String - # @!parse - # # @return [String] - # attr_writer :language - - # @!attribute [r] prompt + # @!attribute prompt # An optional text to guide the model's style or continue a previous audio # segment. The # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) @@ -62,11 +53,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :prompt, String - # @!parse - # # @return [String] - # attr_writer :prompt - - # @!attribute [r] response_format + # @!attribute response_format # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. For `gpt-4o-transcribe` and `gpt-4o-mini-transcribe`, # the only supported format is `json`. 
@@ -74,11 +61,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::AudioResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::AudioResponseFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::AudioResponseFormat] - # attr_writer :response_format - - # @!attribute [r] temperature + # @!attribute temperature # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and # deterministic. If set to 0, the model will use @@ -88,11 +71,7 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float - # @!parse - # # @return [Float] - # attr_writer :temperature - - # @!attribute [r] timestamp_granularities + # @!attribute timestamp_granularities # The timestamp granularities to populate for this transcription. # `response_format` must be set `verbose_json` to use timestamp granularities. # Either or both of these options are supported: `word`, or `segment`. Note: There @@ -103,10 +82,6 @@ class TranscriptionCreateParams < OpenAI::Internal::Type::BaseModel optional :timestamp_granularities, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Audio::TranscriptionCreateParams::TimestampGranularity] } - # @!parse - # # @return [Array] - # attr_writer :timestamp_granularities - # @!method initialize(file:, model:, include: nil, language: nil, prompt: nil, response_format: nil, temperature: nil, timestamp_granularities: nil, request_options: {}) # @param file [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] diff --git a/lib/openai/models/audio/transcription_text_delta_event.rb b/lib/openai/models/audio/transcription_text_delta_event.rb index f7dff312..7f9705d1 100644 --- a/lib/openai/models/audio/transcription_text_delta_event.rb +++ b/lib/openai/models/audio/transcription_text_delta_event.rb @@ -16,7 +16,7 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"transcript.text.delta"] required :type, const: :"transcript.text.delta" - # @!attribute [r] logprobs + # @!attribute logprobs # The log probabilities of the delta. Only included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) # with the `include[]` parameter set to `logprobs`. @@ -25,10 +25,6 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDeltaEvent::Logprob] } - # @!parse - # # @return [Array] - # attr_writer :logprobs - # @!method initialize(delta:, logprobs: nil, type: :"transcript.text.delta") # Emitted when there is an additional text delta. This is also the first event # emitted when the transcription starts. Only emitted when you @@ -40,36 +36,24 @@ class TranscriptionTextDeltaEvent < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :"transcript.text.delta"] class Logprob < OpenAI::Internal::Type::BaseModel - # @!attribute [r] token + # @!attribute token # The token that was used to generate the log probability. # # @return [String, nil] optional :token, String - # @!parse - # # @return [String] - # attr_writer :token - - # @!attribute [r] bytes + # @!attribute bytes # The bytes that were used to generate the log probability. 
# # @return [Array, nil] optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Array] - # attr_writer :bytes - - # @!attribute [r] logprob + # @!attribute logprob # The log probability of the token. # # @return [Float, nil] optional :logprob, Float - # @!parse - # # @return [Float] - # attr_writer :logprob - # @!method initialize(token: nil, bytes: nil, logprob: nil) # @param token [String] # @param bytes [Array] diff --git a/lib/openai/models/audio/transcription_text_done_event.rb b/lib/openai/models/audio/transcription_text_done_event.rb index 3e0fb33c..be7eb322 100644 --- a/lib/openai/models/audio/transcription_text_done_event.rb +++ b/lib/openai/models/audio/transcription_text_done_event.rb @@ -16,7 +16,7 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"transcript.text.done"] required :type, const: :"transcript.text.done" - # @!attribute [r] logprobs + # @!attribute logprobs # The log probabilities of the individual tokens in the transcription. Only # included if you # [create a transcription](https://platform.openai.com/docs/api-reference/audio/create-transcription) @@ -26,10 +26,6 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel optional :logprobs, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionTextDoneEvent::Logprob] } - # @!parse - # # @return [Array] - # attr_writer :logprobs - # @!method initialize(text:, logprobs: nil, type: :"transcript.text.done") # Emitted when the transcription is complete. Contains the complete transcription # text. Only emitted when you @@ -41,36 +37,24 @@ class TranscriptionTextDoneEvent < OpenAI::Internal::Type::BaseModel # @param type [Symbol, :"transcript.text.done"] class Logprob < OpenAI::Internal::Type::BaseModel - # @!attribute [r] token + # @!attribute token # The token that was used to generate the log probability. # # @return [String, nil] optional :token, String - # @!parse - # # @return [String] - # attr_writer :token - - # @!attribute [r] bytes + # @!attribute bytes # The bytes that were used to generate the log probability. # # @return [Array, nil] optional :bytes, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Array] - # attr_writer :bytes - - # @!attribute [r] logprob + # @!attribute logprob # The log probability of the token. # # @return [Float, nil] optional :logprob, Float - # @!parse - # # @return [Float] - # attr_writer :logprob - # @!method initialize(token: nil, bytes: nil, logprob: nil) # @param token [String] # @param bytes [Array] diff --git a/lib/openai/models/audio/transcription_verbose.rb b/lib/openai/models/audio/transcription_verbose.rb index f0b3f7c3..ae9e3c77 100644 --- a/lib/openai/models/audio/transcription_verbose.rb +++ b/lib/openai/models/audio/transcription_verbose.rb @@ -22,26 +22,18 @@ class TranscriptionVerbose < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!attribute [r] segments + # @!attribute segments # Segments of the transcribed text and their corresponding details. # # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] } - # @!parse - # # @return [Array] - # attr_writer :segments - - # @!attribute [r] words + # @!attribute words # Extracted words and their corresponding timestamps. 
# # @return [Array, nil] optional :words, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionWord] } - # @!parse - # # @return [Array] - # attr_writer :words - # @!method initialize(duration:, language:, text:, segments: nil, words: nil) # Represents a verbose json transcription response returned by model, based on the # provided input. diff --git a/lib/openai/models/audio/translation_create_params.rb b/lib/openai/models/audio/translation_create_params.rb index 7589e685..ce70c85f 100644 --- a/lib/openai/models/audio/translation_create_params.rb +++ b/lib/openai/models/audio/translation_create_params.rb @@ -5,8 +5,7 @@ module Models module Audio # @see OpenAI::Resources::Audio::Translations#create class TranslationCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file @@ -23,7 +22,7 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::AudioModel] required :model, union: -> { OpenAI::Models::Audio::TranslationCreateParams::Model } - # @!attribute [r] prompt + # @!attribute prompt # An optional text to guide the model's style or continue a previous audio # segment. The # [prompt](https://platform.openai.com/docs/guides/speech-to-text#prompting) @@ -32,22 +31,14 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :prompt, String - # @!parse - # # @return [String] - # attr_writer :prompt - - # @!attribute [r] response_format + # @!attribute response_format # The format of the output, in one of these options: `json`, `text`, `srt`, # `verbose_json`, or `vtt`. # # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat, nil] optional :response_format, enum: -> { OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::Audio::TranslationCreateParams::ResponseFormat] - # attr_writer :response_format - - # @!attribute [r] temperature + # @!attribute temperature # The sampling temperature, between 0 and 1. Higher values like 0.8 will make the # output more random, while lower values like 0.2 will make it more focused and # deterministic. If set to 0, the model will use @@ -57,10 +48,6 @@ class TranslationCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float - # @!parse - # # @return [Float] - # attr_writer :temperature - # @!method initialize(file:, model:, prompt: nil, response_format: nil, temperature: nil, request_options: {}) # @param file [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::AudioModel] diff --git a/lib/openai/models/audio/translation_verbose.rb b/lib/openai/models/audio/translation_verbose.rb index 5d802ffc..c5c9c54c 100644 --- a/lib/openai/models/audio/translation_verbose.rb +++ b/lib/openai/models/audio/translation_verbose.rb @@ -22,16 +22,12 @@ class TranslationVerbose < OpenAI::Internal::Type::BaseModel # @return [String] required :text, String - # @!attribute [r] segments + # @!attribute segments # Segments of the translated text and their corresponding details. 
# # @return [Array, nil] optional :segments, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Audio::TranscriptionSegment] } - # @!parse - # # @return [Array] - # attr_writer :segments - # @!method initialize(duration:, language:, text:, segments: nil) # @param duration [Float] # @param language [String] diff --git a/lib/openai/models/batch.rb b/lib/openai/models/batch.rb index 0eb7ef45..d8a84818 100644 --- a/lib/openai/models/batch.rb +++ b/lib/openai/models/batch.rb @@ -45,105 +45,65 @@ class Batch < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Batch::Status] required :status, enum: -> { OpenAI::Models::Batch::Status } - # @!attribute [r] cancelled_at + # @!attribute cancelled_at # The Unix timestamp (in seconds) for when the batch was cancelled. # # @return [Integer, nil] optional :cancelled_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :cancelled_at - - # @!attribute [r] cancelling_at + # @!attribute cancelling_at # The Unix timestamp (in seconds) for when the batch started cancelling. # # @return [Integer, nil] optional :cancelling_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :cancelling_at - - # @!attribute [r] completed_at + # @!attribute completed_at # The Unix timestamp (in seconds) for when the batch was completed. # # @return [Integer, nil] optional :completed_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :completed_at - - # @!attribute [r] error_file_id + # @!attribute error_file_id # The ID of the file containing the outputs of requests with errors. # # @return [String, nil] optional :error_file_id, String - # @!parse - # # @return [String] - # attr_writer :error_file_id - - # @!attribute [r] errors + # @!attribute errors # # @return [OpenAI::Models::Batch::Errors, nil] optional :errors, -> { OpenAI::Models::Batch::Errors } - # @!parse - # # @return [OpenAI::Models::Batch::Errors] - # attr_writer :errors - - # @!attribute [r] expired_at + # @!attribute expired_at # The Unix timestamp (in seconds) for when the batch expired. # # @return [Integer, nil] optional :expired_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :expired_at - - # @!attribute [r] expires_at + # @!attribute expires_at # The Unix timestamp (in seconds) for when the batch will expire. # # @return [Integer, nil] optional :expires_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :expires_at - - # @!attribute [r] failed_at + # @!attribute failed_at # The Unix timestamp (in seconds) for when the batch failed. # # @return [Integer, nil] optional :failed_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :failed_at - - # @!attribute [r] finalizing_at + # @!attribute finalizing_at # The Unix timestamp (in seconds) for when the batch started finalizing. # # @return [Integer, nil] optional :finalizing_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :finalizing_at - - # @!attribute [r] in_progress_at + # @!attribute in_progress_at # The Unix timestamp (in seconds) for when the batch started processing. # # @return [Integer, nil] optional :in_progress_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :in_progress_at - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. 
This can be useful # for storing additional information about the object in a structured format, and @@ -155,26 +115,18 @@ class Batch < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] output_file_id + # @!attribute output_file_id # The ID of the file containing the outputs of successfully executed requests. # # @return [String, nil] optional :output_file_id, String - # @!parse - # # @return [String] - # attr_writer :output_file_id - - # @!attribute [r] request_counts + # @!attribute request_counts # The request counts for different statuses within the batch. # # @return [OpenAI::Models::BatchRequestCounts, nil] optional :request_counts, -> { OpenAI::Models::BatchRequestCounts } - # @!parse - # # @return [OpenAI::Models::BatchRequestCounts] - # attr_writer :request_counts - # @!method initialize(id:, completion_window:, created_at:, endpoint:, input_file_id:, status:, cancelled_at: nil, cancelling_at: nil, completed_at: nil, error_file_id: nil, errors: nil, expired_at: nil, expires_at: nil, failed_at: nil, finalizing_at: nil, in_progress_at: nil, metadata: nil, output_file_id: nil, request_counts: nil, object: :batch) # @param id [String] # @param completion_window [String] @@ -218,25 +170,17 @@ module Status # @see OpenAI::Models::Batch#errors class Errors < OpenAI::Internal::Type::BaseModel - # @!attribute [r] data + # @!attribute data # # @return [Array, nil] optional :data, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::BatchError] } - # @!parse - # # @return [Array] - # attr_writer :data - - # @!attribute [r] object + # @!attribute object # The object type, which is always `list`. # # @return [String, nil] optional :object, String - # @!parse - # # @return [String] - # attr_writer :object - # @!method initialize(data: nil, object: nil) # @param data [Array] # @param object [String] diff --git a/lib/openai/models/batch_cancel_params.rb b/lib/openai/models/batch_cancel_params.rb index 60cd89b8..9068ce31 100644 --- a/lib/openai/models/batch_cancel_params.rb +++ b/lib/openai/models/batch_cancel_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#cancel class BatchCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/batch_create_params.rb b/lib/openai/models/batch_create_params.rb index ef913352..9c5654db 100644 --- a/lib/openai/models/batch_create_params.rb +++ b/lib/openai/models/batch_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#create class BatchCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute completion_window diff --git a/lib/openai/models/batch_error.rb b/lib/openai/models/batch_error.rb index 513fcaf3..9d629686 100644 --- a/lib/openai/models/batch_error.rb +++ b/lib/openai/models/batch_error.rb @@ -3,32 +3,24 @@ module OpenAI module Models class BatchError < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code + # @!attribute code # An error code identifying the error type. 
# # @return [String, nil] optional :code, String - # @!parse - # # @return [String] - # attr_writer :code - # @!attribute line # The line number of the input file where the error occurred, if applicable. # # @return [Integer, nil] optional :line, Integer, nil?: true - # @!attribute [r] message + # @!attribute message # A human-readable message providing more details about the error. # # @return [String, nil] optional :message, String - # @!parse - # # @return [String] - # attr_writer :message - # @!attribute param # The name of the parameter that caused the error, if applicable. # diff --git a/lib/openai/models/batch_list_params.rb b/lib/openai/models/batch_list_params.rb index ab35f805..388dc273 100644 --- a/lib/openai/models/batch_list_params.rb +++ b/lib/openai/models/batch_list_params.rb @@ -4,11 +4,10 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#list class BatchListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -17,21 +16,13 @@ class BatchListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!method initialize(after: nil, limit: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/batch_retrieve_params.rb b/lib/openai/models/batch_retrieve_params.rb index 6c9e459c..a03157a4 100644 --- a/lib/openai/models/batch_retrieve_params.rb +++ b/lib/openai/models/batch_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Batches#retrieve class BatchRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/assistant.rb b/lib/openai/models/beta/assistant.rb index 6f861ccc..a4069e36 100644 --- a/lib/openai/models/beta/assistant.rb +++ b/lib/openai/models/beta/assistant.rb @@ -142,24 +142,16 @@ class Assistant < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Assistant#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::Assistant::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch, nil] optional :file_search, -> { 
OpenAI::Models::Beta::Assistant::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::Assistant::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -171,7 +163,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Assistant::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter`` tool. There can be a maximum of 20 files # associated with the tool. @@ -179,17 +171,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::Assistant::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -198,10 +186,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/assistant_create_params.rb b/lib/openai/models/beta/assistant_create_params.rb index 5c2c0fbe..089e0fe9 100644 --- a/lib/openai/models/beta/assistant_create_params.rb +++ b/lib/openai/models/beta/assistant_create_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#create class AssistantCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute model @@ -102,7 +101,7 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources }, nil?: true - # @!attribute [r] tools + # @!attribute tools # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. @@ -110,10 +109,6 @@ class AssistantCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. 
So 0.1 @@ -156,25 +151,17 @@ module Model end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. For example, the `code_interpreter` tool requires @@ -186,7 +173,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -194,17 +181,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -213,11 +196,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - - # @!attribute [r] vector_stores + # @!attribute vector_stores # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # with file_ids and attach it to this assistant. There can be a maximum of 1 @@ -227,16 +206,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore] } - # @!parse - # # @return [Array] - # attr_writer :vector_stores - # @!method initialize(vector_store_ids: nil, vector_stores: nil) # @param vector_store_ids [Array] # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. 
# @@ -244,11 +219,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::AssistantCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # attr_writer :chunking_strategy - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. @@ -256,10 +227,6 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and diff --git a/lib/openai/models/beta/assistant_delete_params.rb b/lib/openai/models/beta/assistant_delete_params.rb index df76595d..6200b148 100644 --- a/lib/openai/models/beta/assistant_delete_params.rb +++ b/lib/openai/models/beta/assistant_delete_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#delete class AssistantDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/assistant_list_params.rb b/lib/openai/models/beta/assistant_list_params.rb index c35334fc..5d3b268b 100644 --- a/lib/openai/models/beta/assistant_list_params.rb +++ b/lib/openai/models/beta/assistant_list_params.rb @@ -5,11 +5,10 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#list class AssistantListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -18,11 +17,7 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -31,32 +26,20 @@ class AssistantListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. 
# # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::AssistantListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::AssistantListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/beta/assistant_retrieve_params.rb b/lib/openai/models/beta/assistant_retrieve_params.rb index 6bb8b075..852988c7 100644 --- a/lib/openai/models/beta/assistant_retrieve_params.rb +++ b/lib/openai/models/beta/assistant_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#retrieve class AssistantRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/assistant_stream_event.rb b/lib/openai/models/beta/assistant_stream_event.rb index e8f7fefa..eaa92aba 100644 --- a/lib/openai/models/beta/assistant_stream_event.rb +++ b/lib/openai/models/beta/assistant_stream_event.rb @@ -122,16 +122,12 @@ class ThreadCreated < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.created"] required :event, const: :"thread.created" - # @!attribute [r] enabled + # @!attribute enabled # Whether to enable input audio transcription. # # @return [Boolean, nil] optional :enabled, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :enabled - # @!method initialize(data:, enabled: nil, event: :"thread.created") # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is diff --git a/lib/openai/models/beta/assistant_tool_choice.rb b/lib/openai/models/beta/assistant_tool_choice.rb index 21e29156..43914c38 100644 --- a/lib/openai/models/beta/assistant_tool_choice.rb +++ b/lib/openai/models/beta/assistant_tool_choice.rb @@ -10,15 +10,11 @@ class AssistantToolChoice < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Beta::AssistantToolChoice::Type] required :type, enum: -> { OpenAI::Models::Beta::AssistantToolChoice::Type } - # @!attribute [r] function + # @!attribute function # # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction, nil] optional :function, -> { OpenAI::Models::Beta::AssistantToolChoiceFunction } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantToolChoiceFunction] - # attr_writer :function - # @!method initialize(type:, function: nil) # Specifies a tool the model should use. Use to force the model to call a specific # tool. 
diff --git a/lib/openai/models/beta/assistant_update_params.rb b/lib/openai/models/beta/assistant_update_params.rb index 6cb2ed05..c0a93261 100644 --- a/lib/openai/models/beta/assistant_update_params.rb +++ b/lib/openai/models/beta/assistant_update_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Assistants#update class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute description @@ -33,7 +32,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] model + # @!attribute model # ID of the model to use. You can use the # [List models](https://platform.openai.com/docs/api-reference/models/list) API to # see all of your available models, or see our @@ -43,10 +42,6 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model, nil] optional :model, union: -> { OpenAI::Models::Beta::AssistantUpdateParams::Model } - # @!parse - # # @return [String, Symbol, OpenAI::Models::Beta::AssistantUpdateParams::Model] - # attr_writer :model - # @!attribute name # The name of the assistant. The maximum length is 256 characters. # @@ -106,7 +101,7 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources, nil] optional :tool_resources, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources }, nil?: true - # @!attribute [r] tools + # @!attribute tools # A list of tool enabled on the assistant. There can be a maximum of 128 tools per # assistant. Tools can be of types `code_interpreter`, `file_search`, or # `function`. @@ -114,10 +109,6 @@ class AssistantUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::AssistantTool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -270,25 +261,17 @@ module Model end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::AssistantUpdateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. 
For example, the `code_interpreter` tool requires @@ -300,7 +283,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # Overrides the list of # [file](https://platform.openai.com/docs/api-reference/files) IDs made available # to the `code_interpreter` tool. There can be a maximum of 20 files associated @@ -309,17 +292,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::AssistantUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # Overrides the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -328,10 +307,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/file_search_tool.rb b/lib/openai/models/beta/file_search_tool.rb index b9baee3e..9dc13172 100644 --- a/lib/openai/models/beta/file_search_tool.rb +++ b/lib/openai/models/beta/file_search_tool.rb @@ -10,23 +10,19 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!attribute [r] file_search + # @!attribute file_search # Overrides for the file search tool. # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch] - # attr_writer :file_search - # @!method initialize(file_search: nil, type: :file_search) # @param file_search [OpenAI::Models::Beta::FileSearchTool::FileSearch] # @param type [Symbol, :file_search] # @see OpenAI::Models::Beta::FileSearchTool#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] max_num_results + # @!attribute max_num_results # The maximum number of results the file search tool should output. The default is # 20 for `gpt-4*` models and 5 for `gpt-3.5-turbo`. This number should be between # 1 and 50 inclusive. @@ -39,11 +35,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :max_num_results, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_num_results - - # @!attribute [r] ranking_options + # @!attribute ranking_options # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. 
# @@ -54,10 +46,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions } - # @!parse - # # @return [OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions] - # attr_writer :ranking_options - # @!method initialize(max_num_results: nil, ranking_options: nil) # Overrides for the file search tool. # @@ -73,17 +61,13 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @return [Float] required :score_threshold, Float - # @!attribute [r] ranker + # @!attribute ranker # The ranker to use for the file search. If not specified will use the `auto` # ranker. # # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::FileSearchTool::FileSearch::RankingOptions::Ranker] - # attr_writer :ranker - # @!method initialize(score_threshold:, ranker: nil) # The ranking options for the file search. If not specified, the file search tool # will use the `auto` ranker and a score_threshold of 0. diff --git a/lib/openai/models/beta/thread.rb b/lib/openai/models/beta/thread.rb index bdb79d97..757ea5d5 100644 --- a/lib/openai/models/beta/thread.rb +++ b/lib/openai/models/beta/thread.rb @@ -55,24 +55,16 @@ class Thread < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::Thread::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::Thread::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::Thread::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -84,7 +76,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. 
@@ -92,17 +84,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to @@ -111,10 +99,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/thread_create_and_run_params.rb b/lib/openai/models/beta/thread_create_and_run_params.rb index c87d75f8..131ba814 100644 --- a/lib/openai/models/beta/thread_create_and_run_params.rb +++ b/lib/openai/models/beta/thread_create_and_run_params.rb @@ -7,8 +7,7 @@ module Beta # # @see OpenAI::Resources::Beta::Threads#stream_raw class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute assistant_id @@ -66,7 +65,7 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Model }, nil?: true - # @!attribute [r] parallel_tool_calls + # @!attribute parallel_tool_calls # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. @@ -74,10 +73,6 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :parallel_tool_calls - # @!attribute response_format # Specifies the format that the model must output. Compatible with # [GPT-4o](https://platform.openai.com/docs/models#gpt-4o), @@ -111,17 +106,13 @@ class ThreadCreateAndRunParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true - # @!attribute [r] thread + # @!attribute thread # Options to create a new thread. If no thread is provided when running a request, # an empty thread will be created. # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread, nil] optional :thread, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread] - # attr_writer :thread - # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will # not call any tools and instead generates a message. `auto` is the default value @@ -206,7 +197,7 @@ module Model end class Thread < OpenAI::Internal::Type::BaseModel - # @!attribute [r] messages + # @!attribute messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. 
# @@ -214,10 +205,6 @@ class Thread < OpenAI::Internal::Type::BaseModel optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message] } - # @!parse - # # @return [Array] - # attr_writer :messages - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -329,27 +316,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::Message::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] @@ -383,26 +362,18 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread#tool_resources class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -414,7 +385,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -422,17 +393,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. 
There can be a maximum of 1 vector store attached to @@ -441,11 +408,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - - # @!attribute [r] vector_stores + # @!attribute vector_stores # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # with file_ids and attach it to this thread. There can be a maximum of 1 vector @@ -455,16 +418,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore] } - # @!parse - # # @return [Array] - # attr_writer :vector_stores - # @!method initialize(vector_store_ids: nil, vector_stores: nil) # @param vector_store_ids [Array] # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # @@ -472,11 +431,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateAndRunParams::Thread::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # attr_writer :chunking_strategy - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. @@ -484,10 +439,6 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -583,25 +534,17 @@ class Static < OpenAI::Internal::Type::BaseModel end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are used by the assistant's tools. The resources are # specific to the type of tool. 
For example, the `code_interpreter` tool requires @@ -613,7 +556,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -621,17 +564,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateAndRunParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The ID of the # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this assistant. There can be a maximum of 1 vector store attached to @@ -640,10 +579,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/thread_create_params.rb b/lib/openai/models/beta/thread_create_params.rb index 85033056..2d768eaf 100644 --- a/lib/openai/models/beta/thread_create_params.rb +++ b/lib/openai/models/beta/thread_create_params.rb @@ -5,11 +5,10 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#create class ThreadCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] messages + # @!attribute messages # A list of [messages](https://platform.openai.com/docs/api-reference/messages) to # start the thread with. # @@ -17,10 +16,6 @@ class ThreadCreateParams < OpenAI::Internal::Type::BaseModel optional :messages, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::Message] } - # @!parse - # # @return [Array] - # attr_writer :messages - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -128,27 +123,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. 
# # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::ThreadCreateParams::Message::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] @@ -181,25 +168,17 @@ class FileSearch < OpenAI::Internal::Type::BaseModel end class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -211,7 +190,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -219,17 +198,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadCreateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. There can be a maximum of 1 vector store attached to @@ -238,11 +213,7 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - - # @!attribute [r] vector_stores + # @!attribute vector_stores # A helper to create a # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # with file_ids and attach it to this thread. 
There can be a maximum of 1 vector @@ -252,16 +223,12 @@ class FileSearch < OpenAI::Internal::Type::BaseModel optional :vector_stores, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore] } - # @!parse - # # @return [Array] - # attr_writer :vector_stores - # @!method initialize(vector_store_ids: nil, vector_stores: nil) # @param vector_store_ids [Array] # @param vector_stores [Array] class VectorStore < OpenAI::Internal::Type::BaseModel - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. # @@ -269,11 +236,7 @@ class VectorStore < OpenAI::Internal::Type::BaseModel optional :chunking_strategy, union: -> { OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Auto, OpenAI::Models::Beta::ThreadCreateParams::ToolResources::FileSearch::VectorStore::ChunkingStrategy::Static] - # attr_writer :chunking_strategy - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs to # add to the vector store. There can be a maximum of 10000 files in a vector # store. @@ -281,10 +244,6 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and diff --git a/lib/openai/models/beta/thread_delete_params.rb b/lib/openai/models/beta/thread_delete_params.rb index 2071367c..308e47db 100644 --- a/lib/openai/models/beta/thread_delete_params.rb +++ b/lib/openai/models/beta/thread_delete_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#delete class ThreadDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/thread_retrieve_params.rb b/lib/openai/models/beta/thread_retrieve_params.rb index 663f5488..c27f0bf6 100644 --- a/lib/openai/models/beta/thread_retrieve_params.rb +++ b/lib/openai/models/beta/thread_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#retrieve class ThreadRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/beta/thread_stream_event.rb b/lib/openai/models/beta/thread_stream_event.rb index d7dee4f7..bf5cc945 100644 --- a/lib/openai/models/beta/thread_stream_event.rb +++ b/lib/openai/models/beta/thread_stream_event.rb @@ -16,16 +16,12 @@ class ThreadStreamEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"thread.created"] required :event, const: :"thread.created" - # @!attribute [r] enabled + # @!attribute enabled # Whether to enable input audio 
transcription. # # @return [Boolean, nil] optional :enabled, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :enabled - # @!method initialize(data:, enabled: nil, event: :"thread.created") # Occurs when a new # [thread](https://platform.openai.com/docs/api-reference/threads/object) is diff --git a/lib/openai/models/beta/thread_update_params.rb b/lib/openai/models/beta/thread_update_params.rb index 97527fbf..8d7d621e 100644 --- a/lib/openai/models/beta/thread_update_params.rb +++ b/lib/openai/models/beta/thread_update_params.rb @@ -5,8 +5,7 @@ module Models module Beta # @see OpenAI::Resources::Beta::Threads#update class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute metadata @@ -35,25 +34,17 @@ class ThreadUpdateParams < OpenAI::Internal::Type::BaseModel # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolResources < OpenAI::Internal::Type::BaseModel - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::CodeInterpreter] - # attr_writer :code_interpreter - - # @!attribute [r] file_search + # @!attribute file_search # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch, nil] optional :file_search, -> { OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch } - # @!parse - # # @return [OpenAI::Models::Beta::ThreadUpdateParams::ToolResources::FileSearch] - # attr_writer :file_search - # @!method initialize(code_interpreter: nil, file_search: nil) # A set of resources that are made available to the assistant's tools in this # thread. The resources are specific to the type of tool. For example, the @@ -65,7 +56,7 @@ class ToolResources < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [file](https://platform.openai.com/docs/api-reference/files) IDs made # available to the `code_interpreter` tool. There can be a maximum of 20 files # associated with the tool. @@ -73,17 +64,13 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!method initialize(file_ids: nil) # @param file_ids [Array] end # @see OpenAI::Models::Beta::ThreadUpdateParams::ToolResources#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] vector_store_ids + # @!attribute vector_store_ids # The # [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object) # attached to this thread. 
There can be a maximum of 1 vector store attached to @@ -92,10 +79,6 @@ class FileSearch < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :vector_store_ids - # @!method initialize(vector_store_ids: nil) # @param vector_store_ids [Array] end diff --git a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb index 66b0623a..3d825a51 100644 --- a/lib/openai/models/beta/threads/file_citation_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_citation_delta_annotation.rb @@ -17,43 +17,27 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_citation] required :type, const: :file_citation - # @!attribute [r] end_index + # @!attribute end_index # # @return [Integer, nil] optional :end_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :end_index - - # @!attribute [r] file_citation + # @!attribute file_citation # # @return [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation, nil] optional :file_citation, -> { OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation::FileCitation] - # attr_writer :file_citation - - # @!attribute [r] start_index + # @!attribute start_index # # @return [Integer, nil] optional :start_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :start_index - - # @!attribute [r] text + # @!attribute text # The text in the message content that needs to be replaced. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - # @!method initialize(index:, end_index: nil, file_citation: nil, start_index: nil, text: nil, type: :file_citation) # A citation within the message that points to a specific quote from a specific # File associated with the assistant or the message. Generated when the assistant @@ -68,26 +52,18 @@ class FileCitationDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::FileCitationDeltaAnnotation#file_citation class FileCitation < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the specific File the citation is from. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] quote + # @!attribute quote # The specific quote in the file. 
# # @return [String, nil] optional :quote, String - # @!parse - # # @return [String] - # attr_writer :quote - # @!method initialize(file_id: nil, quote: nil) # @param file_id [String] # @param quote [String] diff --git a/lib/openai/models/beta/threads/file_path_delta_annotation.rb b/lib/openai/models/beta/threads/file_path_delta_annotation.rb index 065e7eab..679015ba 100644 --- a/lib/openai/models/beta/threads/file_path_delta_annotation.rb +++ b/lib/openai/models/beta/threads/file_path_delta_annotation.rb @@ -17,43 +17,27 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_path] required :type, const: :file_path - # @!attribute [r] end_index + # @!attribute end_index # # @return [Integer, nil] optional :end_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :end_index - - # @!attribute [r] file_path + # @!attribute file_path # # @return [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath, nil] optional :file_path, -> { OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation::FilePath] - # attr_writer :file_path - - # @!attribute [r] start_index + # @!attribute start_index # # @return [Integer, nil] optional :start_index, Integer - # @!parse - # # @return [Integer] - # attr_writer :start_index - - # @!attribute [r] text + # @!attribute text # The text in the message content that needs to be replaced. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - # @!method initialize(index:, end_index: nil, file_path: nil, start_index: nil, text: nil, type: :file_path) # A URL for the file that's generated when the assistant used the # `code_interpreter` tool to generate a file. @@ -67,16 +51,12 @@ class FilePathDeltaAnnotation < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::FilePathDeltaAnnotation#file_path class FilePath < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file that was generated. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - # @!method initialize(file_id: nil) # @param file_id [String] end diff --git a/lib/openai/models/beta/threads/image_file.rb b/lib/openai/models/beta/threads/image_file.rb index 61b60dc0..df480221 100644 --- a/lib/openai/models/beta/threads/image_file.rb +++ b/lib/openai/models/beta/threads/image_file.rb @@ -13,17 +13,13 @@ class ImageFile < OpenAI::Internal::Type::BaseModel # @return [String] required :file_id, String - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. 
# # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFile::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] - # attr_writer :detail - # @!method initialize(file_id:, detail: nil) # @param file_id [String] # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFile::Detail] diff --git a/lib/openai/models/beta/threads/image_file_delta.rb b/lib/openai/models/beta/threads/image_file_delta.rb index 117dd1c1..25fc81e0 100644 --- a/lib/openai/models/beta/threads/image_file_delta.rb +++ b/lib/openai/models/beta/threads/image_file_delta.rb @@ -5,18 +5,14 @@ module Models module Beta module Threads class ImageFileDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image if specified by the user. `low` uses # fewer tokens, you can opt in to high resolution using `high`. # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageFileDelta::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] - # attr_writer :detail - - # @!attribute [r] file_id + # @!attribute file_id # The [File](https://platform.openai.com/docs/api-reference/files) ID of the image # in the message content. Set `purpose="vision"` when uploading the File if you # need to later display the file content. @@ -24,10 +20,6 @@ class ImageFileDelta < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - # @!method initialize(detail: nil, file_id: nil) # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageFileDelta::Detail] # @param file_id [String] diff --git a/lib/openai/models/beta/threads/image_file_delta_block.rb b/lib/openai/models/beta/threads/image_file_delta_block.rb index a4abc497..3befaf8c 100644 --- a/lib/openai/models/beta/threads/image_file_delta_block.rb +++ b/lib/openai/models/beta/threads/image_file_delta_block.rb @@ -17,15 +17,11 @@ class ImageFileDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_file] required :type, const: :image_file - # @!attribute [r] image_file + # @!attribute image_file # # @return [OpenAI::Models::Beta::Threads::ImageFileDelta, nil] optional :image_file, -> { OpenAI::Models::Beta::Threads::ImageFileDelta } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::ImageFileDelta] - # attr_writer :image_file - # @!method initialize(index:, image_file: nil, type: :image_file) # References an image [File](https://platform.openai.com/docs/api-reference/files) # in the content of a message. diff --git a/lib/openai/models/beta/threads/image_url.rb b/lib/openai/models/beta/threads/image_url.rb index 14266c31..1b88b1b2 100644 --- a/lib/openai/models/beta/threads/image_url.rb +++ b/lib/openai/models/beta/threads/image_url.rb @@ -12,17 +12,13 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. 
Default value is `auto` # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURL::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] - # attr_writer :detail - # @!method initialize(url:, detail: nil) # @param url [String] # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURL::Detail] diff --git a/lib/openai/models/beta/threads/image_url_delta.rb b/lib/openai/models/beta/threads/image_url_delta.rb index 3b2f4eab..9ba548e3 100644 --- a/lib/openai/models/beta/threads/image_url_delta.rb +++ b/lib/openai/models/beta/threads/image_url_delta.rb @@ -5,28 +5,20 @@ module Models module Beta module Threads class ImageURLDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image. `low` uses fewer tokens, you can opt in # to high resolution using `high`. # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Beta::Threads::ImageURLDelta::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] - # attr_writer :detail - - # @!attribute [r] url + # @!attribute url # The URL of the image, must be a supported image types: jpeg, jpg, png, gif, # webp. # # @return [String, nil] optional :url, String - # @!parse - # # @return [String] - # attr_writer :url - # @!method initialize(detail: nil, url: nil) # @param detail [Symbol, OpenAI::Models::Beta::Threads::ImageURLDelta::Detail] # @param url [String] diff --git a/lib/openai/models/beta/threads/image_url_delta_block.rb b/lib/openai/models/beta/threads/image_url_delta_block.rb index 4f7b9a82..8b140bfb 100644 --- a/lib/openai/models/beta/threads/image_url_delta_block.rb +++ b/lib/openai/models/beta/threads/image_url_delta_block.rb @@ -17,15 +17,11 @@ class ImageURLDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image_url] required :type, const: :image_url - # @!attribute [r] image_url + # @!attribute image_url # # @return [OpenAI::Models::Beta::Threads::ImageURLDelta, nil] optional :image_url, -> { OpenAI::Models::Beta::Threads::ImageURLDelta } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::ImageURLDelta] - # attr_writer :image_url - # @!method initialize(index:, image_url: nil, type: :image_url) # References an image URL in the content of a message. # diff --git a/lib/openai/models/beta/threads/message.rb b/lib/openai/models/beta/threads/message.rb index 90cf7aba..ccff15ee 100644 --- a/lib/openai/models/beta/threads/message.rb +++ b/lib/openai/models/beta/threads/message.rb @@ -124,27 +124,19 @@ class Message < OpenAI::Internal::Type::BaseModel # @param object [Symbol, :"thread.message"] class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. 
# # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Message::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] diff --git a/lib/openai/models/beta/threads/message_create_params.rb b/lib/openai/models/beta/threads/message_create_params.rb index 081a69c8..711686f8 100644 --- a/lib/openai/models/beta/threads/message_create_params.rb +++ b/lib/openai/models/beta/threads/message_create_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#create class MessageCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute content @@ -87,27 +86,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. # # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageCreateParams::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] diff --git a/lib/openai/models/beta/threads/message_delete_params.rb b/lib/openai/models/beta/threads/message_delete_params.rb index 3625e9b1..2e3c77d2 100644 --- a/lib/openai/models/beta/threads/message_delete_params.rb +++ b/lib/openai/models/beta/threads/message_delete_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#delete class MessageDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/message_delta.rb b/lib/openai/models/beta/threads/message_delta.rb index 9458a9bc..04f0a1fd 100644 --- a/lib/openai/models/beta/threads/message_delta.rb +++ b/lib/openai/models/beta/threads/message_delta.rb @@ -5,27 +5,19 @@ module Models module Beta module Threads class MessageDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] content + # @!attribute content # The content of the message in array of text and/or images. # # @return [Array, nil] optional :content, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::MessageContentDelta] } - # @!parse - # # @return [Array] - # attr_writer :content - - # @!attribute [r] role + # @!attribute role # The entity that produced the message. One of `user` or `assistant`. # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role, nil] optional :role, enum: -> { OpenAI::Models::Beta::Threads::MessageDelta::Role } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageDelta::Role] - # attr_writer :role - # @!method initialize(content: nil, role: nil) # The delta containing the fields that have changed on the Message. 
# diff --git a/lib/openai/models/beta/threads/message_list_params.rb b/lib/openai/models/beta/threads/message_list_params.rb index 1fffd076..2dbe8d80 100644 --- a/lib/openai/models/beta/threads/message_list_params.rb +++ b/lib/openai/models/beta/threads/message_list_params.rb @@ -6,11 +6,10 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#list class MessageListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -19,11 +18,7 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -32,42 +27,26 @@ class MessageListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::MessageListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::MessageListParams::Order] - # attr_writer :order - - # @!attribute [r] run_id + # @!attribute run_id # Filter messages by the run ID that generated them. 
# # @return [String, nil] optional :run_id, String - # @!parse - # # @return [String] - # attr_writer :run_id - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, run_id: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/beta/threads/message_retrieve_params.rb b/lib/openai/models/beta/threads/message_retrieve_params.rb index 10e58171..4b724f65 100644 --- a/lib/openai/models/beta/threads/message_retrieve_params.rb +++ b/lib/openai/models/beta/threads/message_retrieve_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#retrieve class MessageRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/message_update_params.rb b/lib/openai/models/beta/threads/message_update_params.rb index 568cc684..09909fdc 100644 --- a/lib/openai/models/beta/threads/message_update_params.rb +++ b/lib/openai/models/beta/threads/message_update_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Messages#update class MessageUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/refusal_delta_block.rb b/lib/openai/models/beta/threads/refusal_delta_block.rb index dcc1bd21..9f1cc3a1 100644 --- a/lib/openai/models/beta/threads/refusal_delta_block.rb +++ b/lib/openai/models/beta/threads/refusal_delta_block.rb @@ -17,15 +17,11 @@ class RefusalDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :refusal] required :type, const: :refusal - # @!attribute [r] refusal + # @!attribute refusal # # @return [String, nil] optional :refusal, String - # @!parse - # # @return [String] - # attr_writer :refusal - # @!method initialize(index:, refusal: nil, type: :refusal) # The refusal content that is part of a message. # diff --git a/lib/openai/models/beta/threads/run.rb b/lib/openai/models/beta/threads/run.rb index fb23679b..7638e17a 100644 --- a/lib/openai/models/beta/threads/run.rb +++ b/lib/openai/models/beta/threads/run.rb @@ -253,17 +253,13 @@ class Run < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Run#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] reason + # @!attribute reason # The reason why the run is incomplete. This will point to which specific token # limit was reached over the course of the run. # # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::Run::IncompleteDetails::Reason] - # attr_writer :reason - # @!method initialize(reason: nil) # Details on why the run is incomplete. Will be `null` if the run is not # incomplete. 
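A minimal sketch of the shape these hunks converge on, assuming the `optional` DSL and `OpenAI::Internal::Type::BaseModel` behave as the surrounding diffs suggest; `Widget` and `size` are made-up names used only for illustration and are not part of this patch:

require "openai"

# Post-change style: the `# @!attribute` tag documents the reader, the
# `optional` call declares the field, and no commented-out `@!parse` /
# `attr_writer` block is kept alongside it.
class Widget < OpenAI::Internal::Type::BaseModel
  # @!attribute size
  #   An optional field; the reader returns nil when the value is unset.
  #
  #   @return [Integer, nil]
  optional :size, Integer

  # @!method initialize(size: nil)
  #   @param size [Integer]
end

The apparent intent is to stop duplicating writer documentation in commented-out `@!parse` blocks; how the writers are documented after this change is not shown in this section.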
diff --git a/lib/openai/models/beta/threads/run_cancel_params.rb b/lib/openai/models/beta/threads/run_cancel_params.rb index 6067a1a4..13baf1ce 100644 --- a/lib/openai/models/beta/threads/run_cancel_params.rb +++ b/lib/openai/models/beta/threads/run_cancel_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#cancel class RunCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_create_params.rb b/lib/openai/models/beta/threads/run_create_params.rb index 8ae80de8..6af10c28 100644 --- a/lib/openai/models/beta/threads/run_create_params.rb +++ b/lib/openai/models/beta/threads/run_create_params.rb @@ -8,8 +8,7 @@ module Threads # # @see OpenAI::Resources::Beta::Threads::Runs#create_stream_raw class RunCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute assistant_id @@ -20,7 +19,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [String] required :assistant_id, String - # @!attribute [r] include + # @!attribute include # A list of additional fields to include in the response. Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` # to fetch the file search result content. @@ -33,10 +32,6 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - # @!attribute additional_instructions # Appends additional instructions at the end of the instructions for the run. This # is useful for modifying the behavior on a per-run basis without overriding other @@ -101,7 +96,7 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::ChatModel, nil] optional :model, union: -> { OpenAI::Models::Beta::Threads::RunCreateParams::Model }, nil?: true - # @!attribute [r] parallel_tool_calls + # @!attribute parallel_tool_calls # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. @@ -109,10 +104,6 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :parallel_tool_calls - # @!attribute reasoning_effort # **o-series models only** # @@ -299,27 +290,19 @@ module Role end class Attachment < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to attach to the message. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] tools + # @!attribute tools # The tools to add this file to. 
# # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::RunCreateParams::AdditionalMessage::Attachment::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!method initialize(file_id: nil, tools: nil) # @param file_id [String] # @param tools [Array] diff --git a/lib/openai/models/beta/threads/run_list_params.rb b/lib/openai/models/beta/threads/run_list_params.rb index dd2e424a..85e39197 100644 --- a/lib/openai/models/beta/threads/run_list_params.rb +++ b/lib/openai/models/beta/threads/run_list_params.rb @@ -6,11 +6,10 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#list class RunListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -19,11 +18,7 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -32,32 +27,20 @@ class RunListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::RunListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::RunListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/beta/threads/run_retrieve_params.rb b/lib/openai/models/beta/threads/run_retrieve_params.rb index 464d303b..307672fd 100644 --- a/lib/openai/models/beta/threads/run_retrieve_params.rb +++ b/lib/openai/models/beta/threads/run_retrieve_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#retrieve class RunRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb index 16181aa3..34faf0fa 100644 --- a/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb +++ b/lib/openai/models/beta/threads/run_submit_tool_outputs_params.rb @@ -8,8 +8,7 @@ module Threads # # @see OpenAI::Resources::Beta::Threads::Runs#submit_tool_outputs_stream_raw class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id @@ -30,27 +29,19 @@ class RunSubmitToolOutputsParams < OpenAI::Internal::Type::BaseModel # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] class ToolOutput < OpenAI::Internal::Type::BaseModel - # @!attribute [r] output + # @!attribute output # The output of the tool call to be submitted to continue the run. # # @return [String, nil] optional :output, String - # @!parse - # # @return [String] - # attr_writer :output - - # @!attribute [r] tool_call_id + # @!attribute tool_call_id # The ID of the tool call in the `required_action` object within the run object # the output is being submitted for. 
# # @return [String, nil] optional :tool_call_id, String - # @!parse - # # @return [String] - # attr_writer :tool_call_id - # @!method initialize(output: nil, tool_call_id: nil) # @param output [String] # @param tool_call_id [String] diff --git a/lib/openai/models/beta/threads/run_update_params.rb b/lib/openai/models/beta/threads/run_update_params.rb index ddfe60d0..2d418080 100644 --- a/lib/openai/models/beta/threads/run_update_params.rb +++ b/lib/openai/models/beta/threads/run_update_params.rb @@ -6,8 +6,7 @@ module Beta module Threads # @see OpenAI::Resources::Beta::Threads::Runs#update class RunUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb index e1c7ac7f..ad3f6c66 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_logs.rb @@ -18,16 +18,12 @@ class CodeInterpreterLogs < OpenAI::Internal::Type::BaseModel # @return [Symbol, :logs] required :type, const: :logs - # @!attribute [r] logs + # @!attribute logs # The text output from the Code Interpreter tool call. # # @return [String, nil] optional :logs, String - # @!parse - # # @return [String] - # attr_writer :logs - # @!method initialize(index:, logs: nil, type: :logs) # Text output from the Code Interpreter tool call as part of a run step. # diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb index d46e7d33..7e0c8ae1 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_output_image.rb @@ -18,15 +18,11 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :image] required :type, const: :image - # @!attribute [r] image + # @!attribute image # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image, nil] optional :image, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] - # attr_writer :image - # @!method initialize(index:, image: nil, type: :image) # @param index [Integer] # @param image [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage::Image] @@ -34,17 +30,13 @@ class CodeInterpreterOutputImage < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterOutputImage#image class Image < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_id + # @!attribute file_id # The [file](https://platform.openai.com/docs/api-reference/files) ID of the # image. 
# # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - # @!method initialize(file_id: nil) # @param file_id [String] end diff --git a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb index 3b041752..84e66baf 100644 --- a/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/code_interpreter_tool_call_delta.rb @@ -19,27 +19,19 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :code_interpreter] required :type, const: :code_interpreter - # @!attribute [r] id + # @!attribute id # The ID of the tool call. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] code_interpreter + # @!attribute code_interpreter # The Code Interpreter tool call definition. # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter, nil] optional :code_interpreter, -> { OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter] - # attr_writer :code_interpreter - # @!method initialize(index:, id: nil, code_interpreter: nil, type: :code_interpreter) # Details of the Code Interpreter tool call the run step was involved in. # @@ -50,17 +42,13 @@ class CodeInterpreterToolCallDelta < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta#code_interpreter class CodeInterpreter < OpenAI::Internal::Type::BaseModel - # @!attribute [r] input + # @!attribute input # The input to the Code Interpreter tool call. # # @return [String, nil] optional :input, String - # @!parse - # # @return [String] - # attr_writer :input - - # @!attribute [r] outputs + # @!attribute outputs # The outputs from the Code Interpreter tool call. Code Interpreter can output one # or more items, including text (`logs`) or images (`image`). Each of these are # represented by a different object type. @@ -69,10 +57,6 @@ class CodeInterpreter < OpenAI::Internal::Type::BaseModel optional :outputs, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::CodeInterpreterToolCallDelta::CodeInterpreter::Output] } - # @!parse - # # @return [Array] - # attr_writer :outputs - # @!method initialize(input: nil, outputs: nil) # The Code Interpreter tool call definition. # diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb index 9ab8ead6..f42b150c 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call.rb @@ -32,28 +32,20 @@ class FileSearchToolCall < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall#file_search class FileSearch < OpenAI::Internal::Type::BaseModel - # @!attribute [r] ranking_options + # @!attribute ranking_options # The ranking options for the file search. 
# # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::RankingOptions] - # attr_writer :ranking_options - - # @!attribute [r] results + # @!attribute results # The results of the file search. # # @return [Array, nil] optional :results, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result] } - # @!parse - # # @return [Array] - # attr_writer :results - # @!method initialize(ranking_options: nil, results: nil) # For now, this is always going to be an empty object. # @@ -118,7 +110,7 @@ class Result < OpenAI::Internal::Type::BaseModel # @return [Float] required :score, Float - # @!attribute [r] content + # @!attribute content # The content of the result that was found. The content is only included if # requested via the include query parameter. # @@ -126,10 +118,6 @@ class Result < OpenAI::Internal::Type::BaseModel optional :content, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content] } - # @!parse - # # @return [Array] - # attr_writer :content - # @!method initialize(file_id:, file_name:, score:, content: nil) # A result instance of the file search. # @@ -139,27 +127,19 @@ class Result < OpenAI::Internal::Type::BaseModel # @param content [Array] class Content < OpenAI::Internal::Type::BaseModel - # @!attribute [r] text + # @!attribute text # The text content of the file. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - - # @!attribute [r] type + # @!attribute type # The type of the content. # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type, nil] optional :type, enum: -> { OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] - # attr_writer :type - # @!method initialize(text: nil, type: nil) # @param text [String] # @param type [Symbol, OpenAI::Models::Beta::Threads::Runs::FileSearchToolCall::FileSearch::Result::Content::Type] diff --git a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb index c4e4e5e3..31b50baf 100644 --- a/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/file_search_tool_call_delta.rb @@ -25,16 +25,12 @@ class FileSearchToolCallDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :file_search] required :type, const: :file_search - # @!attribute [r] id + # @!attribute id # The ID of the tool call object. 
# # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - # @!method initialize(file_search:, index:, id: nil, type: :file_search) # @param file_search [Object] # @param index [Integer] diff --git a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb index fe7116d5..c9af620f 100644 --- a/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb +++ b/lib/openai/models/beta/threads/runs/function_tool_call_delta.rb @@ -19,26 +19,18 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function] required :type, const: :function - # @!attribute [r] id + # @!attribute id # The ID of the tool call object. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] function + # @!attribute function # The definition of the function that was called. # # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function, nil] optional :function, -> { OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta::Function] - # attr_writer :function - # @!method initialize(index:, id: nil, function: nil, type: :function) # @param index [Integer] # @param id [String] @@ -47,26 +39,18 @@ class FunctionToolCallDelta < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::FunctionToolCallDelta#function class Function < OpenAI::Internal::Type::BaseModel - # @!attribute [r] arguments + # @!attribute arguments # The arguments passed to the function. # # @return [String, nil] optional :arguments, String - # @!parse - # # @return [String] - # attr_writer :arguments - - # @!attribute [r] name + # @!attribute name # The name of the function. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!attribute output # The output of the function. This will be `null` if the outputs have not been # [submitted](https://platform.openai.com/docs/api-reference/runs/submitToolOutputs) diff --git a/lib/openai/models/beta/threads/runs/run_step_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta.rb index 10ae040b..ec46591a 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta.rb @@ -6,16 +6,12 @@ module Beta module Threads module Runs class RunStepDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] step_details + # @!attribute step_details # The details of the run step. # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject, nil] optional :step_details, union: -> { OpenAI::Models::Beta::Threads::Runs::RunStepDelta::StepDetails } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta, OpenAI::Models::Beta::Threads::Runs::ToolCallDeltaObject] - # attr_writer :step_details - # @!method initialize(step_details: nil) # The delta containing the fields that have changed on the run step. 
# diff --git a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb index c57bba4e..4335b875 100644 --- a/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb +++ b/lib/openai/models/beta/threads/runs/run_step_delta_message_delta.rb @@ -12,16 +12,12 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @return [Symbol, :message_creation] required :type, const: :message_creation - # @!attribute [r] message_creation + # @!attribute message_creation # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation, nil] optional :message_creation, -> { OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta::MessageCreation] - # attr_writer :message_creation - # @!method initialize(message_creation: nil, type: :message_creation) # Details of the message creation by the run step. # @@ -30,16 +26,12 @@ class RunStepDeltaMessageDelta < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Beta::Threads::Runs::RunStepDeltaMessageDelta#message_creation class MessageCreation < OpenAI::Internal::Type::BaseModel - # @!attribute [r] message_id + # @!attribute message_id # The ID of the message that was created by this run step. # # @return [String, nil] optional :message_id, String - # @!parse - # # @return [String] - # attr_writer :message_id - # @!method initialize(message_id: nil) # @param message_id [String] end diff --git a/lib/openai/models/beta/threads/runs/step_list_params.rb b/lib/openai/models/beta/threads/runs/step_list_params.rb index 4f484a34..75b02311 100644 --- a/lib/openai/models/beta/threads/runs/step_list_params.rb +++ b/lib/openai/models/beta/threads/runs/step_list_params.rb @@ -7,8 +7,7 @@ module Threads module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#list class StepListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id @@ -16,7 +15,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @return [String] required :thread_id, String - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -25,11 +24,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -38,11 +33,7 @@ class StepListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] include + # @!attribute include # A list of additional fields to include in the response. 
Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` # to fetch the file search result content. @@ -55,32 +46,20 @@ class StepListParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Beta::Threads::Runs::StepListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Beta::Threads::Runs::StepListParams::Order] - # attr_writer :order - # @!method initialize(thread_id:, after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # @param thread_id [String] # @param after [String] diff --git a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb index 3e6934b7..00db2d8a 100644 --- a/lib/openai/models/beta/threads/runs/step_retrieve_params.rb +++ b/lib/openai/models/beta/threads/runs/step_retrieve_params.rb @@ -7,8 +7,7 @@ module Threads module Runs # @see OpenAI::Resources::Beta::Threads::Runs::Steps#retrieve class StepRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute thread_id @@ -21,7 +20,7 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel # @return [String] required :run_id, String - # @!attribute [r] include + # @!attribute include # A list of additional fields to include in the response. Currently the only # supported value is `step_details.tool_calls[*].file_search.results[*].content` # to fetch the file search result content. @@ -34,10 +33,6 @@ class StepRetrieveParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Beta::Threads::Runs::RunStepInclude] } - # @!parse - # # @return [Array] - # attr_writer :include - # @!method initialize(thread_id:, run_id:, include: nil, request_options: {}) # @param thread_id [String] # @param run_id [String] diff --git a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb index ca02ab4c..417a924a 100644 --- a/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb +++ b/lib/openai/models/beta/threads/runs/tool_call_delta_object.rb @@ -12,7 +12,7 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, :tool_calls] required :type, const: :tool_calls - # @!attribute [r] tool_calls + # @!attribute tool_calls # An array of tool calls the run step was involved in. These can be associated # with one of three types of tools: `code_interpreter`, `file_search`, or # `function`. 
@@ -21,10 +21,6 @@ class ToolCallDeltaObject < OpenAI::Internal::Type::BaseModel optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::Runs::ToolCallDelta] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(tool_calls: nil, type: :tool_calls) # Details of the tool call. # diff --git a/lib/openai/models/beta/threads/text_delta.rb b/lib/openai/models/beta/threads/text_delta.rb index cbac0c09..2d767df4 100644 --- a/lib/openai/models/beta/threads/text_delta.rb +++ b/lib/openai/models/beta/threads/text_delta.rb @@ -5,26 +5,18 @@ module Models module Beta module Threads class TextDelta < OpenAI::Internal::Type::BaseModel - # @!attribute [r] annotations + # @!attribute annotations # # @return [Array, nil] optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Beta::Threads::AnnotationDelta] } - # @!parse - # # @return [Array] - # attr_writer :annotations - - # @!attribute [r] value + # @!attribute value # The data that makes up the text. # # @return [String, nil] optional :value, String - # @!parse - # # @return [String] - # attr_writer :value - # @!method initialize(annotations: nil, value: nil) # @param annotations [Array] # @param value [String] diff --git a/lib/openai/models/beta/threads/text_delta_block.rb b/lib/openai/models/beta/threads/text_delta_block.rb index 12701fae..7191d790 100644 --- a/lib/openai/models/beta/threads/text_delta_block.rb +++ b/lib/openai/models/beta/threads/text_delta_block.rb @@ -17,15 +17,11 @@ class TextDeltaBlock < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text] required :type, const: :text - # @!attribute [r] text + # @!attribute text # # @return [OpenAI::Models::Beta::Threads::TextDelta, nil] optional :text, -> { OpenAI::Models::Beta::Threads::TextDelta } - # @!parse - # # @return [OpenAI::Models::Beta::Threads::TextDelta] - # attr_writer :text - # @!method initialize(index:, text: nil, type: :text) # The text content that is part of a message. # diff --git a/lib/openai/models/chat/chat_completion.rb b/lib/openai/models/chat/chat_completion.rb index 8e808b6e..2e58ff3d 100644 --- a/lib/openai/models/chat/chat_completion.rb +++ b/lib/openai/models/chat/chat_completion.rb @@ -60,7 +60,7 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::ChatCompletion::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletion::ServiceTier }, nil?: true - # @!attribute [r] system_fingerprint + # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # # Can be used in conjunction with the `seed` request parameter to understand when @@ -69,20 +69,12 @@ class ChatCompletion < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :system_fingerprint, String - # @!parse - # # @return [String] - # attr_writer :system_fingerprint - - # @!attribute [r] usage + # @!attribute usage # Usage statistics for the completion request. # # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::Models::CompletionUsage } - # @!parse - # # @return [OpenAI::Models::CompletionUsage] - # attr_writer :usage - # @!method initialize(id:, choices:, created:, model:, service_tier: nil, system_fingerprint: nil, usage: nil, object: :"chat.completion") # Represents a chat completion response returned by model, based on the provided # input. 
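For callers, the practical effect of dropping these @!parse writer blocks is unchanged reads: `system_fingerprint` and `usage` remain optional readers that return nil whenever the API omits them. A minimal sketch of reading them defensively, assuming the gem's documented client entry point; the model name is illustrative:

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

completion = client.chat.completions.create(
  model: "gpt-4o-mini",                                  # illustrative model name
  messages: [{role: "user", content: "Say this is a test"}]
)

# Optional fields simply read as nil when absent, so guard the access.
puts "fingerprint: #{completion.system_fingerprint}" if completion.system_fingerprint
puts "total tokens: #{completion.usage.total_tokens}" if completion.usage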
diff --git a/lib/openai/models/chat/chat_completion_assistant_message_param.rb b/lib/openai/models/chat/chat_completion_assistant_message_param.rb index bf6a5b2e..8cd2c5c2 100644 --- a/lib/openai/models/chat/chat_completion_assistant_message_param.rb +++ b/lib/openai/models/chat/chat_completion_assistant_message_param.rb @@ -35,34 +35,26 @@ class ChatCompletionAssistantMessageParam < OpenAI::Internal::Type::BaseModel -> { OpenAI::Models::Chat::ChatCompletionAssistantMessageParam::FunctionCall }, nil?: true - # @!attribute [r] name + # @!attribute name # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!attribute refusal # The refusal message by the assistant. # # @return [String, nil] optional :refusal, String, nil?: true - # @!attribute [r] tool_calls + # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(audio: nil, content: nil, function_call: nil, name: nil, refusal: nil, tool_calls: nil, role: :assistant) # Messages sent by the model in response to user messages. # diff --git a/lib/openai/models/chat/chat_completion_chunk.rb b/lib/openai/models/chat/chat_completion_chunk.rb index 61a57392..77585995 100644 --- a/lib/openai/models/chat/chat_completion_chunk.rb +++ b/lib/openai/models/chat/chat_completion_chunk.rb @@ -60,7 +60,7 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::ServiceTier }, nil?: true - # @!attribute [r] system_fingerprint + # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # Can be used in conjunction with the `seed` request parameter to understand when # backend changes have been made that might impact determinism. @@ -68,10 +68,6 @@ class ChatCompletionChunk < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :system_fingerprint, String - # @!parse - # # @return [String] - # attr_writer :system_fingerprint - # @!attribute usage # An optional field that will only be present when you set # `stream_options: {"include_usage": true}` in your request. When present, it @@ -144,43 +140,31 @@ class Delta < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :content, String, nil?: true - # @!attribute [r] function_call + # @!attribute function_call # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall, nil] optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall } - # @!parse - # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::FunctionCall] - # attr_writer :function_call - # @!attribute refusal # The refusal message generated by the model. # # @return [String, nil] optional :refusal, String, nil?: true - # @!attribute [r] role + # @!attribute role # The role of the author of this message. 
# # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role, nil] optional :role, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::Role] - # attr_writer :role - - # @!attribute [r] tool_calls + # @!attribute tool_calls # # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(content: nil, function_call: nil, refusal: nil, role: nil, tool_calls: nil) # A chat completion delta generated by streamed model responses. # @@ -194,7 +178,7 @@ class Delta < OpenAI::Internal::Type::BaseModel # # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta#function_call class FunctionCall < OpenAI::Internal::Type::BaseModel - # @!attribute [r] arguments + # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. Validate the @@ -203,20 +187,12 @@ class FunctionCall < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :arguments, String - # @!parse - # # @return [String] - # attr_writer :arguments - - # @!attribute [r] name + # @!attribute name # The name of the function to call. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(arguments: nil, name: nil) # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. @@ -247,35 +223,23 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @return [Integer] required :index, Integer - # @!attribute [r] id + # @!attribute id # The ID of the tool call. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] function + # @!attribute function # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function, nil] optional :function, -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function } - # @!parse - # # @return [OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Function] - # attr_writer :function - - # @!attribute [r] type + # @!attribute type # The type of the tool. Currently, only `function` is supported. # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type, nil] optional :type, enum: -> { OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall::Type] - # attr_writer :type - # @!method initialize(index:, id: nil, function: nil, type: nil) # @param index [Integer] # @param id [String] @@ -284,7 +248,7 @@ class ToolCall < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Chat::ChatCompletionChunk::Choice::Delta::ToolCall#function class Function < OpenAI::Internal::Type::BaseModel - # @!attribute [r] arguments + # @!attribute arguments # The arguments to call the function with, as generated by the model in JSON # format. Note that the model does not always generate valid JSON, and may # hallucinate parameters not defined by your function schema. 
Validate the @@ -293,20 +257,12 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :arguments, String - # @!parse - # # @return [String] - # attr_writer :arguments - - # @!attribute [r] name + # @!attribute name # The name of the function to call. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(arguments: nil, name: nil) # @param arguments [String] # @param name [String] diff --git a/lib/openai/models/chat/chat_completion_content_part.rb b/lib/openai/models/chat/chat_completion_content_part.rb index af75a6c6..ecfb1d58 100644 --- a/lib/openai/models/chat/chat_completion_content_part.rb +++ b/lib/openai/models/chat/chat_completion_content_part.rb @@ -43,37 +43,25 @@ class File < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Chat::ChatCompletionContentPart::File#file class File < OpenAI::Internal::Type::BaseModel - # @!attribute [r] file_data + # @!attribute file_data # The base64 encoded file data, used when passing the file to the model as a # string. # # @return [String, nil] optional :file_data, String - # @!parse - # # @return [String] - # attr_writer :file_data - - # @!attribute [r] file_id + # @!attribute file_id # The ID of an uploaded file to use as input. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] filename + # @!attribute filename # The name of the file, used when passing the file to the model as a string. # # @return [String, nil] optional :filename, String - # @!parse - # # @return [String] - # attr_writer :filename - # @!method initialize(file_data: nil, file_id: nil, filename: nil) # @param file_data [String] # @param file_id [String] diff --git a/lib/openai/models/chat/chat_completion_content_part_image.rb b/lib/openai/models/chat/chat_completion_content_part_image.rb index 023fa1d0..06c450d2 100644 --- a/lib/openai/models/chat/chat_completion_content_part_image.rb +++ b/lib/openai/models/chat/chat_completion_content_part_image.rb @@ -29,17 +29,13 @@ class ImageURL < OpenAI::Internal::Type::BaseModel # @return [String] required :url, String - # @!attribute [r] detail + # @!attribute detail # Specifies the detail level of the image. Learn more in the # [Vision guide](https://platform.openai.com/docs/guides/vision#low-or-high-fidelity-image-understanding). # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail, nil] optional :detail, enum: -> { OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] - # attr_writer :detail - # @!method initialize(url:, detail: nil) # @param url [String] # @param detail [Symbol, OpenAI::Models::Chat::ChatCompletionContentPartImage::ImageURL::Detail] diff --git a/lib/openai/models/chat/chat_completion_developer_message_param.rb b/lib/openai/models/chat/chat_completion_developer_message_param.rb index 1eb265ea..2c7e20e4 100644 --- a/lib/openai/models/chat/chat_completion_developer_message_param.rb +++ b/lib/openai/models/chat/chat_completion_developer_message_param.rb @@ -16,17 +16,13 @@ class ChatCompletionDeveloperMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :developer] required :role, const: :developer - # @!attribute [r] name + # @!attribute name # An optional name for the participant. 
Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(content:, name: nil, role: :developer) # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, `developer` messages diff --git a/lib/openai/models/chat/chat_completion_message.rb b/lib/openai/models/chat/chat_completion_message.rb index 63485db1..3d874f33 100644 --- a/lib/openai/models/chat/chat_completion_message.rb +++ b/lib/openai/models/chat/chat_completion_message.rb @@ -22,7 +22,7 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, :assistant] required :role, const: :assistant - # @!attribute [r] annotations + # @!attribute annotations # Annotations for the message, when applicable, as when using the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). # @@ -30,10 +30,6 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel optional :annotations, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessage::Annotation] } - # @!parse - # # @return [Array] - # attr_writer :annotations - # @!attribute audio # If the audio output modality is requested, this object contains data about the # audio response from the model. @@ -42,28 +38,20 @@ class ChatCompletionMessage < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::ChatCompletionAudio, nil] optional :audio, -> { OpenAI::Models::Chat::ChatCompletionAudio }, nil?: true - # @!attribute [r] function_call + # @!attribute function_call # Deprecated and replaced by `tool_calls`. The name and arguments of a function # that should be called, as generated by the model. # # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall, nil] optional :function_call, -> { OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall } - # @!parse - # # @return [OpenAI::Models::Chat::ChatCompletionMessage::FunctionCall] - # attr_writer :function_call - - # @!attribute [r] tool_calls + # @!attribute tool_calls # The tool calls generated by the model, such as function calls. # # @return [Array, nil] optional :tool_calls, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionMessageToolCall] } - # @!parse - # # @return [Array] - # attr_writer :tool_calls - # @!method initialize(content:, refusal:, annotations: nil, audio: nil, function_call: nil, tool_calls: nil, role: :assistant) # A chat completion message generated by the model. # diff --git a/lib/openai/models/chat/chat_completion_stream_options.rb b/lib/openai/models/chat/chat_completion_stream_options.rb index 04006615..4bc8cef1 100644 --- a/lib/openai/models/chat/chat_completion_stream_options.rb +++ b/lib/openai/models/chat/chat_completion_stream_options.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Chat class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] include_usage + # @!attribute include_usage # If set, an additional chunk will be streamed before the `data: [DONE]` message. # The `usage` field on this chunk shows the token usage statistics for the entire # request, and the `choices` field will always be an empty array. 
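A hedged sketch of the `include_usage` flag described above on a streamed request; the `stream_raw` entry point (referenced by @see tags later in this patch) and the model name are assumptions rather than part of this change:

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

stream = client.chat.completions.stream_raw(
  model: "gpt-4o-mini",
  messages: [{role: "user", content: "Stream a short greeting"}],
  stream_options: {include_usage: true}
)

stream.each do |chunk|
  print chunk.choices.first&.delta&.content
  # With include_usage set, one extra final chunk carries usage and an empty choices array.
  puts "\ntotal tokens: #{chunk.usage.total_tokens}" if chunk.usage
end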
@@ -16,10 +16,6 @@ class ChatCompletionStreamOptions < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :include_usage, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :include_usage - # @!method initialize(include_usage: nil) # Options for streaming response. Only set this when you set `stream: true`. # diff --git a/lib/openai/models/chat/chat_completion_system_message_param.rb b/lib/openai/models/chat/chat_completion_system_message_param.rb index bb2c91b6..19d5d598 100644 --- a/lib/openai/models/chat/chat_completion_system_message_param.rb +++ b/lib/openai/models/chat/chat_completion_system_message_param.rb @@ -16,17 +16,13 @@ class ChatCompletionSystemMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :system] required :role, const: :system - # @!attribute [r] name + # @!attribute name # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(content:, name: nil, role: :system) # Developer-provided instructions that the model should follow, regardless of # messages sent by the user. With o1 models and newer, use `developer` messages diff --git a/lib/openai/models/chat/chat_completion_user_message_param.rb b/lib/openai/models/chat/chat_completion_user_message_param.rb index 34b81339..7f72cdc3 100644 --- a/lib/openai/models/chat/chat_completion_user_message_param.rb +++ b/lib/openai/models/chat/chat_completion_user_message_param.rb @@ -16,17 +16,13 @@ class ChatCompletionUserMessageParam < OpenAI::Internal::Type::BaseModel # @return [Symbol, :user] required :role, const: :user - # @!attribute [r] name + # @!attribute name # An optional name for the participant. Provides the model information to # differentiate between participants of the same role. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(content:, name: nil, role: :user) # Messages sent by an end user, containing prompts or additional context # information. diff --git a/lib/openai/models/chat/completion_create_params.rb b/lib/openai/models/chat/completion_create_params.rb index fff48ea0..999209e1 100644 --- a/lib/openai/models/chat/completion_create_params.rb +++ b/lib/openai/models/chat/completion_create_params.rb @@ -7,8 +7,7 @@ module Chat # # @see OpenAI::Resources::Chat::Completions#stream_raw class CompletionCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute messages @@ -49,7 +48,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :frequency_penalty, Float, nil?: true - # @!attribute [r] function_call + # @!attribute function_call # Deprecated in favor of `tool_choice`. # # Controls which (if any) function is called by the model. 
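Since `function_call` and `functions` are deprecated in favor of `tool_choice` and `tools`, a sketch of the replacement shape may help; the client setup is as above, and the tool name and JSON schema are hypothetical:

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

client.chat.completions.create(
  model: "gpt-4o-mini",
  messages: [{role: "user", content: "What's the weather in Paris?"}],
  tools: [
    {
      type: :function,
      function: {
        name: "get_weather",                             # hypothetical tool
        description: "Look up current weather for a city",
        parameters: {
          type: "object",
          properties: {city: {type: "string"}},
          required: ["city"]
        }
      }
    }
  ],
  tool_choice: "auto"  # or pin a specific tool: {type: "function", function: {name: "get_weather"}}
)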
@@ -68,11 +67,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption, nil] optional :function_call, union: -> { OpenAI::Models::Chat::CompletionCreateParams::FunctionCall } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::FunctionCall::FunctionCallMode, OpenAI::Models::Chat::ChatCompletionFunctionCallOption] - # attr_writer :function_call - - # @!attribute [r] functions + # @!attribute functions # Deprecated in favor of `tools`. # # A list of functions the model may generate JSON inputs for. @@ -81,10 +76,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel optional :functions, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::CompletionCreateParams::Function] } - # @!parse - # # @return [Array] - # attr_writer :functions - # @!attribute logit_bias # Modify the likelihood of specified tokens appearing in the completion. # @@ -162,7 +153,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :n, Integer, nil?: true - # @!attribute [r] parallel_tool_calls + # @!attribute parallel_tool_calls # Whether to enable # [parallel function calling](https://platform.openai.com/docs/guides/function-calling#configuring-parallel-function-calling) # during tool use. @@ -170,10 +161,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Boolean, nil] optional :parallel_tool_calls, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :parallel_tool_calls - # @!attribute prediction # Static predicted output content, such as the content of a text file that is # being regenerated. @@ -200,7 +187,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ReasoningEffort, nil] optional :reasoning_effort, enum: -> { OpenAI::Models::ReasoningEffort }, nil?: true - # @!attribute [r] response_format + # @!attribute response_format # An object specifying the format that the model must output. # # Setting to `{ "type": "json_schema", "json_schema": {...} }` enables Structured @@ -215,10 +202,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject, nil] optional :response_format, union: -> { OpenAI::Models::Chat::CompletionCreateParams::ResponseFormat } - # @!parse - # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::ResponseFormatJSONSchema, OpenAI::Models::ResponseFormatJSONObject] - # attr_writer :response_format - # @!attribute seed # This feature is in Beta. If specified, our system will make a best effort to # sample deterministically, such that repeated requests with the same `seed` and @@ -283,7 +266,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true - # @!attribute [r] tool_choice + # @!attribute tool_choice # Controls which (if any) tool is called by the model. `none` means the model will # not call any tool and instead generates a message. `auto` means the model can # pick between generating a message or calling one or more tools. 
`required` means @@ -297,11 +280,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice, nil] optional :tool_choice, union: -> { OpenAI::Models::Chat::ChatCompletionToolChoiceOption } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::ChatCompletionToolChoiceOption::Auto, OpenAI::Models::Chat::ChatCompletionNamedToolChoice] - # attr_writer :tool_choice - - # @!attribute [r] tools + # @!attribute tools # A list of tools the model may call. Currently, only functions are supported as a # tool. Use this to provide a list of functions the model may generate JSON inputs # for. A max of 128 functions are supported. @@ -309,10 +288,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Chat::ChatCompletionTool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_logprobs # An integer between 0 and 20 specifying the number of most likely tokens to # return at each token position, each with an associated log probability. @@ -331,7 +306,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -339,11 +314,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - - # @!attribute [r] web_search_options + # @!attribute web_search_options # This tool searches the web for relevant results to use in a response. Learn more # about the # [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). @@ -351,10 +322,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions, nil] optional :web_search_options, -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions } - # @!parse - # # @return [OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions] - # attr_writer :web_search_options - # @!method initialize(messages:, model:, audio: nil, frequency_penalty: nil, function_call: nil, functions: nil, logit_bias: nil, logprobs: nil, max_completion_tokens: nil, max_tokens: nil, metadata: nil, modalities: nil, n: nil, parallel_tool_calls: nil, prediction: nil, presence_penalty: nil, reasoning_effort: nil, response_format: nil, seed: nil, service_tier: nil, stop: nil, store: nil, stream_options: nil, temperature: nil, tool_choice: nil, tools: nil, top_logprobs: nil, top_p: nil, user: nil, web_search_options: nil, request_options: {}) # @param messages [Array] # @param model [String, Symbol, OpenAI::Models::ChatModel] @@ -459,18 +426,14 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!attribute [r] description + # @!attribute description # A description of what the function does, used by the model to choose when and # how to call the function. 
# # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - - # @!attribute [r] parameters + # @!attribute parameters # The parameters the functions accepts, described as a JSON Schema object. See the # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, # and the @@ -482,10 +445,6 @@ class Function < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}, nil] optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :parameters - # @!method initialize(name:, description: nil, parameters: nil) # @param name [String] # @param description [String] @@ -578,7 +537,7 @@ module Stop end class WebSearchOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] search_context_size + # @!attribute search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # @@ -586,10 +545,6 @@ class WebSearchOptions < OpenAI::Internal::Type::BaseModel optional :search_context_size, enum: -> { OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::SearchContextSize] - # attr_writer :search_context_size - # @!attribute user_location # Approximate location parameters for the search. # @@ -644,48 +599,32 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Chat::CompletionCreateParams::WebSearchOptions::UserLocation#approximate class Approximate < OpenAI::Internal::Type::BaseModel - # @!attribute [r] city + # @!attribute city # Free text input for the city of the user, e.g. `San Francisco`. # # @return [String, nil] optional :city, String - # @!parse - # # @return [String] - # attr_writer :city - - # @!attribute [r] country + # @!attribute country # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of # the user, e.g. `US`. # # @return [String, nil] optional :country, String - # @!parse - # # @return [String] - # attr_writer :country - - # @!attribute [r] region + # @!attribute region # Free text input for the region of the user, e.g. `California`. # # @return [String, nil] optional :region, String - # @!parse - # # @return [String] - # attr_writer :region - - # @!attribute [r] timezone + # @!attribute timezone # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. # # @return [String, nil] optional :timezone, String - # @!parse - # # @return [String] - # attr_writer :timezone - # @!method initialize(city: nil, country: nil, region: nil, timezone: nil) # Approximate location parameters for the search. 
# diff --git a/lib/openai/models/chat/completion_delete_params.rb b/lib/openai/models/chat/completion_delete_params.rb index c32fe53d..819d9af4 100644 --- a/lib/openai/models/chat/completion_delete_params.rb +++ b/lib/openai/models/chat/completion_delete_params.rb @@ -5,8 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#delete class CompletionDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/chat/completion_list_params.rb b/lib/openai/models/chat/completion_list_params.rb index 911b8a36..ab7f9060 100644 --- a/lib/openai/models/chat/completion_list_params.rb +++ b/lib/openai/models/chat/completion_list_params.rb @@ -5,30 +5,21 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#list class CompletionListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last chat completion from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of Chat Completions to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!attribute metadata # A list of metadata keys to filter the Chat Completions by. Example: # @@ -37,27 +28,19 @@ class CompletionListParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] model + # @!attribute model # The model used to generate the Chat Completions. # # @return [String, nil] optional :model, String - # @!parse - # # @return [String] - # attr_writer :model - - # @!attribute [r] order + # @!attribute order # Sort order for Chat Completions by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. 
# # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Chat::CompletionListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::CompletionListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, limit: nil, metadata: nil, model: nil, order: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/chat/completion_retrieve_params.rb b/lib/openai/models/chat/completion_retrieve_params.rb index ccdba91c..50730021 100644 --- a/lib/openai/models/chat/completion_retrieve_params.rb +++ b/lib/openai/models/chat/completion_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#retrieve class CompletionRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/chat/completion_update_params.rb b/lib/openai/models/chat/completion_update_params.rb index 954a4400..b3a23cff 100644 --- a/lib/openai/models/chat/completion_update_params.rb +++ b/lib/openai/models/chat/completion_update_params.rb @@ -5,8 +5,7 @@ module Models module Chat # @see OpenAI::Resources::Chat::Completions#update class CompletionUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute metadata diff --git a/lib/openai/models/chat/completions/message_list_params.rb b/lib/openai/models/chat/completions/message_list_params.rb index 02329b59..19a29905 100644 --- a/lib/openai/models/chat/completions/message_list_params.rb +++ b/lib/openai/models/chat/completions/message_list_params.rb @@ -6,41 +6,28 @@ module Chat module Completions # @see OpenAI::Resources::Chat::Completions::Messages#list class MessageListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last message from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of messages to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for messages by timestamp. Use `asc` for ascending order or `desc` # for descending order. Defaults to `asc`. 
# # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Chat::Completions::MessageListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Chat::Completions::MessageListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/completion.rb b/lib/openai/models/completion.rb index e31c3bbf..4980830d 100644 --- a/lib/openai/models/completion.rb +++ b/lib/openai/models/completion.rb @@ -36,7 +36,7 @@ class Completion < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text_completion] required :object, const: :text_completion - # @!attribute [r] system_fingerprint + # @!attribute system_fingerprint # This fingerprint represents the backend configuration that the model runs with. # # Can be used in conjunction with the `seed` request parameter to understand when @@ -45,20 +45,12 @@ class Completion < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :system_fingerprint, String - # @!parse - # # @return [String] - # attr_writer :system_fingerprint - - # @!attribute [r] usage + # @!attribute usage # Usage statistics for the completion request. # # @return [OpenAI::Models::CompletionUsage, nil] optional :usage, -> { OpenAI::Models::CompletionUsage } - # @!parse - # # @return [OpenAI::Models::CompletionUsage] - # attr_writer :usage - # @!method initialize(id:, choices:, created:, model:, system_fingerprint: nil, usage: nil, object: :text_completion) # Represents a completion response from the API. Note: both the streamed and # non-streamed response objects share the same shape (unlike the chat endpoint). diff --git a/lib/openai/models/completion_choice.rb b/lib/openai/models/completion_choice.rb index 096074a0..3fcd5752 100644 --- a/lib/openai/models/completion_choice.rb +++ b/lib/openai/models/completion_choice.rb @@ -52,42 +52,26 @@ module FinishReason # @see OpenAI::Models::CompletionChoice#logprobs class Logprobs < OpenAI::Internal::Type::BaseModel - # @!attribute [r] text_offset + # @!attribute text_offset # # @return [Array, nil] optional :text_offset, OpenAI::Internal::Type::ArrayOf[Integer] - # @!parse - # # @return [Array] - # attr_writer :text_offset - - # @!attribute [r] token_logprobs + # @!attribute token_logprobs # # @return [Array, nil] optional :token_logprobs, OpenAI::Internal::Type::ArrayOf[Float] - # @!parse - # # @return [Array] - # attr_writer :token_logprobs - - # @!attribute [r] tokens + # @!attribute tokens # # @return [Array, nil] optional :tokens, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :tokens - - # @!attribute [r] top_logprobs + # @!attribute top_logprobs # # @return [ArrayFloat}>, nil] optional :top_logprobs, OpenAI::Internal::Type::ArrayOf[OpenAI::Internal::Type::HashOf[Float]] - # @!parse - # # @return [ArrayFloat}>] - # attr_writer :top_logprobs - # @!method initialize(text_offset: nil, token_logprobs: nil, tokens: nil, top_logprobs: nil) # @param text_offset [Array] # @param token_logprobs [Array] diff --git a/lib/openai/models/completion_create_params.rb b/lib/openai/models/completion_create_params.rb index c1e3656e..7084d9f5 100644 --- a/lib/openai/models/completion_create_params.rb +++ b/lib/openai/models/completion_create_params.rb @@ -6,8 +6,7 @@ module Models # # @see OpenAI::Resources::Completions#create_streaming class CompletionCreateParams < 
OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute model @@ -175,7 +174,7 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :top_p, Float, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -183,10 +182,6 @@ class CompletionCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(model:, prompt:, best_of: nil, echo: nil, frequency_penalty: nil, logit_bias: nil, logprobs: nil, max_tokens: nil, n: nil, presence_penalty: nil, seed: nil, stop: nil, stream_options: nil, suffix: nil, temperature: nil, top_p: nil, user: nil, request_options: {}) # @param model [String, Symbol, OpenAI::Models::CompletionCreateParams::Model] # @param prompt [String, Array, Array, Array>, nil] diff --git a/lib/openai/models/completion_usage.rb b/lib/openai/models/completion_usage.rb index d8e75136..e2b4092d 100644 --- a/lib/openai/models/completion_usage.rb +++ b/lib/openai/models/completion_usage.rb @@ -21,26 +21,18 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @return [Integer] required :total_tokens, Integer - # @!attribute [r] completion_tokens_details + # @!attribute completion_tokens_details # Breakdown of tokens used in a completion. # # @return [OpenAI::Models::CompletionUsage::CompletionTokensDetails, nil] optional :completion_tokens_details, -> { OpenAI::Models::CompletionUsage::CompletionTokensDetails } - # @!parse - # # @return [OpenAI::Models::CompletionUsage::CompletionTokensDetails] - # attr_writer :completion_tokens_details - - # @!attribute [r] prompt_tokens_details + # @!attribute prompt_tokens_details # Breakdown of tokens used in the prompt. # # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails, nil] optional :prompt_tokens_details, -> { OpenAI::Models::CompletionUsage::PromptTokensDetails } - # @!parse - # # @return [OpenAI::Models::CompletionUsage::PromptTokensDetails] - # attr_writer :prompt_tokens_details - # @!method initialize(completion_tokens:, prompt_tokens:, total_tokens:, completion_tokens_details: nil, prompt_tokens_details: nil) # Usage statistics for the completion request. # @@ -52,38 +44,26 @@ class CompletionUsage < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::CompletionUsage#completion_tokens_details class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] accepted_prediction_tokens + # @!attribute accepted_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that # appeared in the completion. # # @return [Integer, nil] optional :accepted_prediction_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :accepted_prediction_tokens - - # @!attribute [r] audio_tokens + # @!attribute audio_tokens # Audio input tokens generated by the model. # # @return [Integer, nil] optional :audio_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :audio_tokens - - # @!attribute [r] reasoning_tokens + # @!attribute reasoning_tokens # Tokens generated by the model for reasoning. 
# # @return [Integer, nil] optional :reasoning_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :reasoning_tokens - - # @!attribute [r] rejected_prediction_tokens + # @!attribute rejected_prediction_tokens # When using Predicted Outputs, the number of tokens in the prediction that did # not appear in the completion. However, like reasoning tokens, these tokens are # still counted in the total completion tokens for purposes of billing, output, @@ -92,10 +72,6 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :rejected_prediction_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :rejected_prediction_tokens - # @!method initialize(accepted_prediction_tokens: nil, audio_tokens: nil, reasoning_tokens: nil, rejected_prediction_tokens: nil) # Breakdown of tokens used in a completion. # @@ -107,26 +83,18 @@ class CompletionTokensDetails < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::CompletionUsage#prompt_tokens_details class PromptTokensDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] audio_tokens + # @!attribute audio_tokens # Audio input tokens present in the prompt. # # @return [Integer, nil] optional :audio_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :audio_tokens - - # @!attribute [r] cached_tokens + # @!attribute cached_tokens # Cached tokens present in the prompt. # # @return [Integer, nil] optional :cached_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :cached_tokens - # @!method initialize(audio_tokens: nil, cached_tokens: nil) # Breakdown of tokens used in the prompt. # diff --git a/lib/openai/models/embedding_create_params.rb b/lib/openai/models/embedding_create_params.rb index 2586d07f..ec1dfd3d 100644 --- a/lib/openai/models/embedding_create_params.rb +++ b/lib/openai/models/embedding_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Embeddings#create class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -31,29 +30,21 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::EmbeddingModel] required :model, union: -> { OpenAI::Models::EmbeddingCreateParams::Model } - # @!attribute [r] dimensions + # @!attribute dimensions # The number of dimensions the resulting output embeddings should have. Only # supported in `text-embedding-3` and later models. # # @return [Integer, nil] optional :dimensions, Integer - # @!parse - # # @return [Integer] - # attr_writer :dimensions - - # @!attribute [r] encoding_format + # @!attribute encoding_format # The format to return the embeddings in. Can be either `float` or # [`base64`](https://pypi.org/project/pybase64/). # # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat, nil] optional :encoding_format, enum: -> { OpenAI::Models::EmbeddingCreateParams::EncodingFormat } - # @!parse - # # @return [Symbol, OpenAI::Models::EmbeddingCreateParams::EncodingFormat] - # attr_writer :encoding_format - - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
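A hedged sketch of these embedding parameters in use; the model name, input text, and dimension count are illustrative rather than taken from this patch:

require "openai"

client = OpenAI::Client.new(api_key: ENV["OPENAI_API_KEY"])

response = client.embeddings.create(
  model: "text-embedding-3-small",
  input: "The food was delicious and the waiter was friendly.",
  dimensions: 256,                  # only honored by text-embedding-3 and later models
  encoding_format: :float
)

vector = response.data.first.embedding
puts vector.length                  # => 256 when the dimension override is applied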
@@ -61,10 +52,6 @@ class EmbeddingCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(input:, model:, dimensions: nil, encoding_format: nil, user: nil, request_options: {}) # @param input [String, Array, Array, Array>] # @param model [String, Symbol, OpenAI::Models::EmbeddingModel] diff --git a/lib/openai/models/eval_create_params.rb b/lib/openai/models/eval_create_params.rb index d66fae28..54d31b12 100644 --- a/lib/openai/models/eval_create_params.rb +++ b/lib/openai/models/eval_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#create class EvalCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute data_source_config @@ -32,26 +31,18 @@ class EvalCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # The name of the evaluation. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - - # @!attribute [r] share_with_openai + # @!attribute share_with_openai # Indicates whether the evaluation is shared with OpenAI. # # @return [Boolean, nil] optional :share_with_openai, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :share_with_openai - # @!method initialize(data_source_config:, testing_criteria:, metadata: nil, name: nil, share_with_openai: nil, request_options: {}) # @param data_source_config [OpenAI::Models::EvalCreateParams::DataSourceConfig::Custom, OpenAI::Models::EvalCreateParams::DataSourceConfig::StoredCompletions] # @param testing_criteria [Array] @@ -89,16 +80,12 @@ class Custom < OpenAI::Internal::Type::BaseModel # @return [Symbol, :custom] required :type, const: :custom - # @!attribute [r] include_sample_schema + # @!attribute include_sample_schema # Whether to include the sample schema in the data source. # # @return [Boolean, nil] optional :include_sample_schema, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :include_sample_schema - # @!method initialize(item_schema:, include_sample_schema: nil, type: :custom) # A CustomDataSourceConfig object that defines the schema for the data source used # for the evaluation runs. 
This schema is used to define the shape of the data diff --git a/lib/openai/models/eval_delete_params.rb b/lib/openai/models/eval_delete_params.rb index 80e4d81d..7e1938d2 100644 --- a/lib/openai/models/eval_delete_params.rb +++ b/lib/openai/models/eval_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#delete class EvalDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/eval_list_params.rb b/lib/openai/models/eval_list_params.rb index e0d2fd84..3888ef6c 100644 --- a/lib/openai/models/eval_list_params.rb +++ b/lib/openai/models/eval_list_params.rb @@ -4,52 +4,35 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#list class EvalListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last eval from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of evals to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for evals by timestamp. Use `asc` for ascending order or `desc` for # descending order. # # @return [Symbol, OpenAI::Models::EvalListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::EvalListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::EvalListParams::Order] - # attr_writer :order - - # @!attribute [r] order_by + # @!attribute order_by # Evals can be ordered by creation time or last updated time. Use `created_at` for # creation time or `updated_at` for last updated time. 
# # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy, nil] optional :order_by, enum: -> { OpenAI::Models::EvalListParams::OrderBy } - # @!parse - # # @return [Symbol, OpenAI::Models::EvalListParams::OrderBy] - # attr_writer :order_by - # @!method initialize(after: nil, limit: nil, order: nil, order_by: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/eval_retrieve_params.rb b/lib/openai/models/eval_retrieve_params.rb index e06ffbe4..2dcaa7bb 100644 --- a/lib/openai/models/eval_retrieve_params.rb +++ b/lib/openai/models/eval_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#retrieve class EvalRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/eval_text_similarity_grader.rb b/lib/openai/models/eval_text_similarity_grader.rb index 9ff351b4..8d037316 100644 --- a/lib/openai/models/eval_text_similarity_grader.rb +++ b/lib/openai/models/eval_text_similarity_grader.rb @@ -34,16 +34,12 @@ class EvalTextSimilarityGrader < OpenAI::Internal::Type::BaseModel # @return [Symbol, :text_similarity] required :type, const: :text_similarity - # @!attribute [r] name + # @!attribute name # The name of the grader. # # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(evaluation_metric:, input:, pass_threshold:, reference:, name: nil, type: :text_similarity) # A TextSimilarityGrader object which grades text based on similarity metrics. # diff --git a/lib/openai/models/eval_update_params.rb b/lib/openai/models/eval_update_params.rb index 8572bf39..c8476dcf 100644 --- a/lib/openai/models/eval_update_params.rb +++ b/lib/openai/models/eval_update_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Evals#update class EvalUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute metadata @@ -19,16 +18,12 @@ class EvalUpdateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # Rename the evaluation. 
# # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(metadata: nil, name: nil, request_options: {}) # @param metadata [Hash{Symbol=>String}, nil] # @param name [String] diff --git a/lib/openai/models/evals/create_eval_completions_run_data_source.rb b/lib/openai/models/evals/create_eval_completions_run_data_source.rb index 56b9b732..ac306948 100644 --- a/lib/openai/models/evals/create_eval_completions_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_completions_run_data_source.rb @@ -28,16 +28,12 @@ class CreateEvalCompletionsRunDataSource < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type] required :type, enum: -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::Type } - # @!attribute [r] sampling_params + # @!attribute sampling_params # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams, nil] optional :sampling_params, -> { OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams } - # @!parse - # # @return [OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource::SamplingParams] - # attr_writer :sampling_params - # @!method initialize(input_messages:, model:, source:, type:, sampling_params: nil) # A CompletionsRunDataSource object describing a model sampling configuration. # @@ -341,15 +337,11 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}] required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!attribute [r] sample + # @!attribute sample # # @return [Hash{Symbol=>Object}, nil] optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :sample - # @!method initialize(item:, sample: nil) # @param item [Hash{Symbol=>Object}] # @param sample [Hash{Symbol=>Object}] @@ -445,46 +437,30 @@ module Type # @see OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource#sampling_params class SamplingParams < OpenAI::Internal::Type::BaseModel - # @!attribute [r] max_completion_tokens + # @!attribute max_completion_tokens # The maximum number of tokens in the generated output. # # @return [Integer, nil] optional :max_completion_tokens, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_completion_tokens - - # @!attribute [r] seed + # @!attribute seed # A seed value to initialize the randomness, during sampling. # # @return [Integer, nil] optional :seed, Integer - # @!parse - # # @return [Integer] - # attr_writer :seed - - # @!attribute [r] temperature + # @!attribute temperature # A higher temperature increases randomness in the outputs. # # @return [Float, nil] optional :temperature, Float - # @!parse - # # @return [Float] - # attr_writer :temperature - - # @!attribute [r] top_p + # @!attribute top_p # An alternative to temperature for nucleus sampling; 1.0 includes all tokens. 
# # @return [Float, nil] optional :top_p, Float - # @!parse - # # @return [Float] - # attr_writer :top_p - # @!method initialize(max_completion_tokens: nil, seed: nil, temperature: nil, top_p: nil) # @param max_completion_tokens [Integer] # @param seed [Integer] diff --git a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb index 3b06b922..2ddad8b0 100644 --- a/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb +++ b/lib/openai/models/evals/create_eval_jsonl_run_data_source.rb @@ -56,15 +56,11 @@ class Content < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}] required :item, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!attribute [r] sample + # @!attribute sample # # @return [Hash{Symbol=>Object}, nil] optional :sample, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :sample - # @!method initialize(item:, sample: nil) # @param item [Hash{Symbol=>Object}] # @param sample [Hash{Symbol=>Object}] diff --git a/lib/openai/models/evals/run_cancel_params.rb b/lib/openai/models/evals/run_cancel_params.rb index 958dad3b..6f84e423 100644 --- a/lib/openai/models/evals/run_cancel_params.rb +++ b/lib/openai/models/evals/run_cancel_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#cancel class RunCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/run_create_params.rb b/lib/openai/models/evals/run_create_params.rb index 3e03a25c..61365782 100644 --- a/lib/openai/models/evals/run_create_params.rb +++ b/lib/openai/models/evals/run_create_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#create class RunCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute data_source @@ -26,16 +25,12 @@ class RunCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # The name of the run. 
# # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(data_source:, metadata: nil, name: nil, request_options: {}) # @param data_source [OpenAI::Models::Evals::CreateEvalJSONLRunDataSource, OpenAI::Models::Evals::CreateEvalCompletionsRunDataSource] # @param metadata [Hash{Symbol=>String}, nil] diff --git a/lib/openai/models/evals/run_delete_params.rb b/lib/openai/models/evals/run_delete_params.rb index 887478b6..0f8cc006 100644 --- a/lib/openai/models/evals/run_delete_params.rb +++ b/lib/openai/models/evals/run_delete_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#delete class RunDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/run_delete_response.rb b/lib/openai/models/evals/run_delete_response.rb index bd1154dd..eb8707ad 100644 --- a/lib/openai/models/evals/run_delete_response.rb +++ b/lib/openai/models/evals/run_delete_response.rb @@ -5,33 +5,21 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#delete class RunDeleteResponse < OpenAI::Internal::Type::BaseModel - # @!attribute [r] deleted + # @!attribute deleted # # @return [Boolean, nil] optional :deleted, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :deleted - - # @!attribute [r] object + # @!attribute object # # @return [String, nil] optional :object, String - # @!parse - # # @return [String] - # attr_writer :object - - # @!attribute [r] run_id + # @!attribute run_id # # @return [String, nil] optional :run_id, String - # @!parse - # # @return [String] - # attr_writer :run_id - # @!method initialize(deleted: nil, object: nil, run_id: nil) # @param deleted [Boolean] # @param object [String] diff --git a/lib/openai/models/evals/run_list_params.rb b/lib/openai/models/evals/run_list_params.rb index b5c500f7..a2b2afb4 100644 --- a/lib/openai/models/evals/run_list_params.rb +++ b/lib/openai/models/evals/run_list_params.rb @@ -5,52 +5,35 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#list class RunListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last run from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of runs to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for runs by timestamp. Use `asc` for ascending order or `desc` for # descending order. Defaults to `asc`. # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Evals::RunListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Order] - # attr_writer :order - - # @!attribute [r] status + # @!attribute status # Filter runs by status. Use "queued" | "in_progress" | "failed" | "completed" | # "canceled". 
# # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status, nil] optional :status, enum: -> { OpenAI::Models::Evals::RunListParams::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::RunListParams::Status] - # attr_writer :status - # @!method initialize(after: nil, limit: nil, order: nil, status: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/evals/run_retrieve_params.rb b/lib/openai/models/evals/run_retrieve_params.rb index 648fa819..0ca8c695 100644 --- a/lib/openai/models/evals/run_retrieve_params.rb +++ b/lib/openai/models/evals/run_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module Evals # @see OpenAI::Resources::Evals::Runs#retrieve class RunRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/runs/output_item_list_params.rb b/lib/openai/models/evals/runs/output_item_list_params.rb index dc3ba2dd..301d2acd 100644 --- a/lib/openai/models/evals/runs/output_item_list_params.rb +++ b/lib/openai/models/evals/runs/output_item_list_params.rb @@ -6,8 +6,7 @@ module Evals module Runs # @see OpenAI::Resources::Evals::Runs::OutputItems#list class OutputItemListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id @@ -15,48 +14,32 @@ class OutputItemListParams < OpenAI::Internal::Type::BaseModel # @return [String] required :eval_id, String - # @!attribute [r] after + # @!attribute after # Identifier for the last output item from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of output items to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order for output items by timestamp. Use `asc` for ascending order or # `desc` for descending order. Defaults to `asc`. # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Order] - # attr_writer :order - - # @!attribute [r] status + # @!attribute status # Filter output items by status. Use `failed` to filter by failed output items or # `pass` to filter by passed output items. 
# # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status, nil] optional :status, enum: -> { OpenAI::Models::Evals::Runs::OutputItemListParams::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Evals::Runs::OutputItemListParams::Status] - # attr_writer :status - # @!method initialize(eval_id:, after: nil, limit: nil, order: nil, status: nil, request_options: {}) # @param eval_id [String] # @param after [String] diff --git a/lib/openai/models/evals/runs/output_item_list_response.rb b/lib/openai/models/evals/runs/output_item_list_response.rb index fe4db7ad..fc0d6e75 100644 --- a/lib/openai/models/evals/runs/output_item_list_response.rb +++ b/lib/openai/models/evals/runs/output_item_list_response.rb @@ -180,26 +180,18 @@ class Input < OpenAI::Internal::Type::BaseModel end class Output < OpenAI::Internal::Type::BaseModel - # @!attribute [r] content + # @!attribute content # The content of the message. # # @return [String, nil] optional :content, String - # @!parse - # # @return [String] - # attr_writer :content - - # @!attribute [r] role + # @!attribute role # The role of the message (e.g. "system", "assistant", "user"). # # @return [String, nil] optional :role, String - # @!parse - # # @return [String] - # attr_writer :role - # @!method initialize(content: nil, role: nil) # @param content [String] # @param role [String] diff --git a/lib/openai/models/evals/runs/output_item_retrieve_params.rb b/lib/openai/models/evals/runs/output_item_retrieve_params.rb index 599a0b19..d85fa9e5 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_params.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_params.rb @@ -6,8 +6,7 @@ module Evals module Runs # @see OpenAI::Resources::Evals::Runs::OutputItems#retrieve class OutputItemRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute eval_id diff --git a/lib/openai/models/evals/runs/output_item_retrieve_response.rb b/lib/openai/models/evals/runs/output_item_retrieve_response.rb index bf311b56..eec2ea1a 100644 --- a/lib/openai/models/evals/runs/output_item_retrieve_response.rb +++ b/lib/openai/models/evals/runs/output_item_retrieve_response.rb @@ -180,26 +180,18 @@ class Input < OpenAI::Internal::Type::BaseModel end class Output < OpenAI::Internal::Type::BaseModel - # @!attribute [r] content + # @!attribute content # The content of the message. # # @return [String, nil] optional :content, String - # @!parse - # # @return [String] - # attr_writer :content - - # @!attribute [r] role + # @!attribute role # The role of the message (e.g. "system", "assistant", "user"). 
# # @return [String, nil] optional :role, String - # @!parse - # # @return [String] - # attr_writer :role - # @!method initialize(content: nil, role: nil) # @param content [String] # @param role [String] diff --git a/lib/openai/models/file_content_params.rb b/lib/openai/models/file_content_params.rb index 5557fffd..22d607fb 100644 --- a/lib/openai/models/file_content_params.rb +++ b/lib/openai/models/file_content_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#content class FileContentParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/file_create_params.rb b/lib/openai/models/file_create_params.rb index 567ed995..28d5f936 100644 --- a/lib/openai/models/file_create_params.rb +++ b/lib/openai/models/file_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#create class FileCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file diff --git a/lib/openai/models/file_delete_params.rb b/lib/openai/models/file_delete_params.rb index 177b99cb..3893e91d 100644 --- a/lib/openai/models/file_delete_params.rb +++ b/lib/openai/models/file_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#delete class FileDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/file_list_params.rb b/lib/openai/models/file_list_params.rb index 5f1be612..76de1996 100644 --- a/lib/openai/models/file_list_params.rb +++ b/lib/openai/models/file_list_params.rb @@ -4,11 +4,10 @@ module OpenAI module Models # @see OpenAI::Resources::Files#list class FileListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -17,42 +16,26 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 10,000, and the default is 10,000. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::FileListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::FileListParams::Order] - # attr_writer :order - - # @!attribute [r] purpose + # @!attribute purpose # Only return files with the given purpose. # # @return [String, nil] optional :purpose, String - # @!parse - # # @return [String] - # attr_writer :purpose - # @!method initialize(after: nil, limit: nil, order: nil, purpose: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/file_object.rb b/lib/openai/models/file_object.rb index fd9c63bb..5031a63d 100644 --- a/lib/openai/models/file_object.rb +++ b/lib/openai/models/file_object.rb @@ -49,27 +49,19 @@ class FileObject < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::FileObject::Status] required :status, enum: -> { OpenAI::Models::FileObject::Status } - # @!attribute [r] expires_at + # @!attribute expires_at # The Unix timestamp (in seconds) for when the file will expire. # # @return [Integer, nil] optional :expires_at, Integer - # @!parse - # # @return [Integer] - # attr_writer :expires_at - - # @!attribute [r] status_details + # @!attribute status_details # Deprecated. For details on why a fine-tuning training file failed validation, # see the `error` field on `fine_tuning.job`. # # @return [String, nil] optional :status_details, String - # @!parse - # # @return [String] - # attr_writer :status_details - # @!method initialize(id:, bytes:, created_at:, filename:, purpose:, status:, expires_at: nil, status_details: nil, object: :file) # The `File` object represents a document that has been uploaded to OpenAI. # diff --git a/lib/openai/models/file_retrieve_params.rb b/lib/openai/models/file_retrieve_params.rb index 6c8c1a70..29c0bdc3 100644 --- a/lib/openai/models/file_retrieve_params.rb +++ b/lib/openai/models/file_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Files#retrieve class FileRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb index ef958285..8a333418 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_create_params.rb @@ -6,8 +6,7 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#create class PermissionCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute project_ids diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb index 7281cf70..339cc7ea 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_delete_params.rb @@ -6,8 +6,7 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#delete 
class PermissionDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb index 29fffbe9..6c272af8 100644 --- a/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/checkpoints/permission_retrieve_params.rb @@ -6,50 +6,33 @@ module FineTuning module Checkpoints # @see OpenAI::Resources::FineTuning::Checkpoints::Permissions#retrieve class PermissionRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last permission ID from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of permissions to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # The order in which to retrieve permissions. # # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order, nil] optional :order, enum: -> { OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::Checkpoints::PermissionRetrieveParams::Order] - # attr_writer :order - - # @!attribute [r] project_id + # @!attribute project_id # The ID of the project to get permissions for. # # @return [String, nil] optional :project_id, String - # @!parse - # # @return [String] - # attr_writer :project_id - # @!method initialize(after: nil, limit: nil, order: nil, project_id: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/fine_tuning/fine_tuning_job.rb b/lib/openai/models/fine_tuning/fine_tuning_job.rb index 27a10624..a5e3d2a9 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job.rb @@ -132,16 +132,12 @@ class FineTuningJob < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] method_ + # @!attribute method_ # The method used for fine-tuning. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method, nil] optional :method_, -> { OpenAI::Models::FineTuning::FineTuningJob::Method }, api_name: :method - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method] - # attr_writer :method_ - # @!method initialize(id:, created_at:, error:, fine_tuned_model:, finished_at:, hyperparameters:, model:, organization_id:, result_files:, seed:, status:, trained_tokens:, training_file:, validation_file:, estimated_finish: nil, integrations: nil, metadata: nil, method_: nil, object: :"fine_tuning.job") # The `fine_tuning.job` object represents a fine-tuning job that has been created # through the API. 
@@ -198,18 +194,14 @@ class Error < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # # @return [Symbol, :auto, Integer, nil] optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -217,21 +209,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. This value will only be # returned when running `supervised` jobs. @@ -306,36 +290,24 @@ module Status # @see OpenAI::Models::FineTuning::FineTuningJob#method_ class Method < OpenAI::Internal::Type::BaseModel - # @!attribute [r] dpo + # @!attribute dpo # Configuration for the DPO fine-tuning method. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo, nil] optional :dpo, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo] - # attr_writer :dpo - - # @!attribute [r] supervised + # @!attribute supervised # Configuration for the supervised fine-tuning method. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised, nil] optional :supervised, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised] - # attr_writer :supervised - - # @!attribute [r] type + # @!attribute type # The type of method. Is either `supervised` or `dpo`. # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type, nil] optional :type, enum: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJob::Method::Type] - # attr_writer :type - # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # @@ -345,16 +317,12 @@ class Method < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. 
# # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # @@ -362,7 +330,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -370,11 +338,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] beta + # @!attribute beta # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. # @@ -382,11 +346,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :beta, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::Beta } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :beta - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -394,11 +354,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -406,10 +362,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Dpo::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. # @@ -482,17 +434,13 @@ module NEpochs # @see OpenAI::Models::FineTuning::FineTuningJob::Method#supervised class Supervised < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. 
# @@ -500,7 +448,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -508,11 +456,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -520,11 +464,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -532,10 +472,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::FineTuningJob::Method::Supervised::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. # diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb index f647416e..b5fb144c 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_event.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_event.rb @@ -35,26 +35,18 @@ class FineTuningJobEvent < OpenAI::Internal::Type::BaseModel # @return [Symbol, :"fine_tuning.job.event"] required :object, const: :"fine_tuning.job.event" - # @!attribute [r] data + # @!attribute data # The data associated with the event. # # @return [Object, nil] optional :data, OpenAI::Internal::Type::Unknown - # @!parse - # # @return [Object] - # attr_writer :data - - # @!attribute [r] type + # @!attribute type # The type of event. # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type, nil] optional :type, enum: -> { OpenAI::Models::FineTuning::FineTuningJobEvent::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::FineTuningJobEvent::Type] - # attr_writer :type - # @!method initialize(id:, created_at:, level:, message:, data: nil, type: nil, object: :"fine_tuning.job.event") # Fine-tuning job event object # diff --git a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb index b1a0ba4f..98c9eaca 100644 --- a/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb +++ b/lib/openai/models/fine_tuning/fine_tuning_job_wandb_integration.rb @@ -25,7 +25,7 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :name, String, nil?: true - # @!attribute [r] tags + # @!attribute tags # A list of tags to be attached to the newly created run. 
These tags are passed # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". @@ -33,10 +33,6 @@ class FineTuningJobWandbIntegration < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tags, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :tags - # @!method initialize(project:, entity: nil, name: nil, tags: nil) # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an diff --git a/lib/openai/models/fine_tuning/job_cancel_params.rb b/lib/openai/models/fine_tuning/job_cancel_params.rb index 129f8e75..4f4cf0b2 100644 --- a/lib/openai/models/fine_tuning/job_cancel_params.rb +++ b/lib/openai/models/fine_tuning/job_cancel_params.rb @@ -5,8 +5,7 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#cancel class JobCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/job_create_params.rb b/lib/openai/models/fine_tuning/job_create_params.rb index e27eb79c..ada446ed 100644 --- a/lib/openai/models/fine_tuning/job_create_params.rb +++ b/lib/openai/models/fine_tuning/job_create_params.rb @@ -5,8 +5,7 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#create class JobCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute model @@ -38,17 +37,13 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @return [String] required :training_file, String - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters] - # attr_writer :hyperparameters - # @!attribute integrations # A list of integrations to enable for your fine-tuning job. # @@ -68,16 +63,12 @@ class JobCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] method_ + # @!attribute method_ # The method used for fine-tuning. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method, nil] optional :method_, -> { OpenAI::Models::FineTuning::JobCreateParams::Method }, api_name: :method - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method] - # attr_writer :method_ - # @!attribute seed # The seed controls the reproducibility of the job. Passing in the same seed and # job parameters should produce the same results, but may differ in rare cases. 
If @@ -155,7 +146,7 @@ module Model # @deprecated class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -163,11 +154,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -175,21 +162,13 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # # @return [Symbol, :auto, Integer, nil] optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. This value is now deprecated # in favor of `method`, and should be passed in under the `method` parameter. @@ -288,7 +267,7 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :name, String, nil?: true - # @!attribute [r] tags + # @!attribute tags # A list of tags to be attached to the newly created run. These tags are passed # through directly to WandB. Some default tags are generated by OpenAI: # "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". @@ -296,10 +275,6 @@ class Wandb < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tags, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :tags - # @!method initialize(project:, entity: nil, name: nil, tags: nil) # The settings for your integration with Weights and Biases. This payload # specifies the project that metrics will be sent to. Optionally, you can set an @@ -314,36 +289,24 @@ class Wandb < OpenAI::Internal::Type::BaseModel end class Method < OpenAI::Internal::Type::BaseModel - # @!attribute [r] dpo + # @!attribute dpo # Configuration for the DPO fine-tuning method. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo, nil] optional :dpo, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo] - # attr_writer :dpo - - # @!attribute [r] supervised + # @!attribute supervised # Configuration for the supervised fine-tuning method. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised, nil] optional :supervised, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised] - # attr_writer :supervised - - # @!attribute [r] type + # @!attribute type # The type of method. Is either `supervised` or `dpo`. 
# # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type, nil] optional :type, enum: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::FineTuning::JobCreateParams::Method::Type] - # attr_writer :type - # @!method initialize(dpo: nil, supervised: nil, type: nil) # The method used for fine-tuning. # @@ -353,17 +316,13 @@ class Method < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::JobCreateParams::Method#dpo class Dpo < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the DPO fine-tuning method. # @@ -371,7 +330,7 @@ class Dpo < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -379,11 +338,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] beta + # @!attribute beta # The beta value for the DPO method. A higher beta value will increase the weight # of the penalty between the policy and reference model. # @@ -391,11 +346,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :beta, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::Beta } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :beta - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -403,11 +354,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -415,10 +362,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Dpo::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, beta: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. 
# @@ -491,17 +434,13 @@ module NEpochs # @see OpenAI::Models::FineTuning::JobCreateParams::Method#supervised class Supervised < OpenAI::Internal::Type::BaseModel - # @!attribute [r] hyperparameters + # @!attribute hyperparameters # The hyperparameters used for the fine-tuning job. # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters, nil] optional :hyperparameters, -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters } - # @!parse - # # @return [OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters] - # attr_writer :hyperparameters - # @!method initialize(hyperparameters: nil) # Configuration for the supervised fine-tuning method. # @@ -509,7 +448,7 @@ class Supervised < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised#hyperparameters class Hyperparameters < OpenAI::Internal::Type::BaseModel - # @!attribute [r] batch_size + # @!attribute batch_size # Number of examples in each batch. A larger batch size means that model # parameters are updated less frequently, but with lower variance. # @@ -517,11 +456,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :batch_size, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::BatchSize } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :batch_size - - # @!attribute [r] learning_rate_multiplier + # @!attribute learning_rate_multiplier # Scaling factor for the learning rate. A smaller learning rate may be useful to # avoid overfitting. # @@ -529,11 +464,7 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :learning_rate_multiplier, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::LearningRateMultiplier } - # @!parse - # # @return [Symbol, :auto, Float] - # attr_writer :learning_rate_multiplier - - # @!attribute [r] n_epochs + # @!attribute n_epochs # The number of epochs to train the model for. An epoch refers to one full cycle # through the training dataset. # @@ -541,10 +472,6 @@ class Hyperparameters < OpenAI::Internal::Type::BaseModel optional :n_epochs, union: -> { OpenAI::Models::FineTuning::JobCreateParams::Method::Supervised::Hyperparameters::NEpochs } - # @!parse - # # @return [Symbol, :auto, Integer] - # attr_writer :n_epochs - # @!method initialize(batch_size: nil, learning_rate_multiplier: nil, n_epochs: nil) # The hyperparameters used for the fine-tuning job. # diff --git a/lib/openai/models/fine_tuning/job_list_events_params.rb b/lib/openai/models/fine_tuning/job_list_events_params.rb index d4729ee6..b745c87b 100644 --- a/lib/openai/models/fine_tuning/job_list_events_params.rb +++ b/lib/openai/models/fine_tuning/job_list_events_params.rb @@ -5,30 +5,21 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list_events class JobListEventsParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last event from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of events to retrieve. 
# # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!method initialize(after: nil, limit: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/fine_tuning/job_list_params.rb b/lib/openai/models/fine_tuning/job_list_params.rb index e2ed96ce..ddd836ef 100644 --- a/lib/openai/models/fine_tuning/job_list_params.rb +++ b/lib/openai/models/fine_tuning/job_list_params.rb @@ -5,30 +5,21 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#list class JobListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last job from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of fine-tuning jobs to retrieve. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!attribute metadata # Optional metadata filter. To filter, use the syntax `metadata[k]=v`. # Alternatively, set `metadata=null` to indicate no metadata. diff --git a/lib/openai/models/fine_tuning/job_retrieve_params.rb b/lib/openai/models/fine_tuning/job_retrieve_params.rb index b1579373..9d8a5b18 100644 --- a/lib/openai/models/fine_tuning/job_retrieve_params.rb +++ b/lib/openai/models/fine_tuning/job_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module FineTuning # @see OpenAI::Resources::FineTuning::Jobs#retrieve class JobRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb index f4f1bea1..afa0afa4 100644 --- a/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb +++ b/lib/openai/models/fine_tuning/jobs/checkpoint_list_params.rb @@ -6,30 +6,21 @@ module FineTuning module Jobs # @see OpenAI::Resources::FineTuning::Jobs::Checkpoints#list class CheckpointListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # Identifier for the last checkpoint ID from the previous pagination request. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] limit + # @!attribute limit # Number of checkpoints to retrieve. 
# # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - # @!method initialize(after: nil, limit: nil, request_options: {}) # @param after [String] # @param limit [Integer] diff --git a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb index 98dbd856..05325975 100644 --- a/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb +++ b/lib/openai/models/fine_tuning/jobs/fine_tuning_job_checkpoint.rb @@ -62,69 +62,41 @@ class FineTuningJobCheckpoint < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::FineTuning::Jobs::FineTuningJobCheckpoint#metrics class Metrics < OpenAI::Internal::Type::BaseModel - # @!attribute [r] full_valid_loss + # @!attribute full_valid_loss # # @return [Float, nil] optional :full_valid_loss, Float - # @!parse - # # @return [Float] - # attr_writer :full_valid_loss - - # @!attribute [r] full_valid_mean_token_accuracy + # @!attribute full_valid_mean_token_accuracy # # @return [Float, nil] optional :full_valid_mean_token_accuracy, Float - # @!parse - # # @return [Float] - # attr_writer :full_valid_mean_token_accuracy - - # @!attribute [r] step + # @!attribute step # # @return [Float, nil] optional :step, Float - # @!parse - # # @return [Float] - # attr_writer :step - - # @!attribute [r] train_loss + # @!attribute train_loss # # @return [Float, nil] optional :train_loss, Float - # @!parse - # # @return [Float] - # attr_writer :train_loss - - # @!attribute [r] train_mean_token_accuracy + # @!attribute train_mean_token_accuracy # # @return [Float, nil] optional :train_mean_token_accuracy, Float - # @!parse - # # @return [Float] - # attr_writer :train_mean_token_accuracy - - # @!attribute [r] valid_loss + # @!attribute valid_loss # # @return [Float, nil] optional :valid_loss, Float - # @!parse - # # @return [Float] - # attr_writer :valid_loss - - # @!attribute [r] valid_mean_token_accuracy + # @!attribute valid_mean_token_accuracy # # @return [Float, nil] optional :valid_mean_token_accuracy, Float - # @!parse - # # @return [Float] - # attr_writer :valid_mean_token_accuracy - # @!method initialize(full_valid_loss: nil, full_valid_mean_token_accuracy: nil, step: nil, train_loss: nil, train_mean_token_accuracy: nil, valid_loss: nil, valid_mean_token_accuracy: nil) # Metrics at the step number during the fine-tuning job. # diff --git a/lib/openai/models/function_definition.rb b/lib/openai/models/function_definition.rb index a37c41bc..ac6820db 100644 --- a/lib/openai/models/function_definition.rb +++ b/lib/openai/models/function_definition.rb @@ -10,18 +10,14 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!attribute [r] description + # @!attribute description # A description of what the function does, used by the model to choose when and # how to call the function. # # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - - # @!attribute [r] parameters + # @!attribute parameters # The parameters the functions accepts, described as a JSON Schema object. 
See the # [guide](https://platform.openai.com/docs/guides/function-calling) for examples, # and the @@ -33,10 +29,6 @@ class FunctionDefinition < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>Object}, nil] optional :parameters, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :parameters - # @!attribute strict # Whether to enable strict schema adherence when generating the function call. If # set to true, the model will follow the exact schema defined in the `parameters` diff --git a/lib/openai/models/image.rb b/lib/openai/models/image.rb index 7d54b273..d76cb186 100644 --- a/lib/openai/models/image.rb +++ b/lib/openai/models/image.rb @@ -3,38 +3,26 @@ module OpenAI module Models class Image < OpenAI::Internal::Type::BaseModel - # @!attribute [r] b64_json + # @!attribute b64_json # The base64-encoded JSON of the generated image, if `response_format` is # `b64_json`. # # @return [String, nil] optional :b64_json, String - # @!parse - # # @return [String] - # attr_writer :b64_json - - # @!attribute [r] revised_prompt + # @!attribute revised_prompt # The prompt that was used to generate the image, if there was any revision to the # prompt. # # @return [String, nil] optional :revised_prompt, String - # @!parse - # # @return [String] - # attr_writer :revised_prompt - - # @!attribute [r] url + # @!attribute url # The URL of the generated image, if `response_format` is `url` (default). # # @return [String, nil] optional :url, String - # @!parse - # # @return [String] - # attr_writer :url - # @!method initialize(b64_json: nil, revised_prompt: nil, url: nil) # Represents the url or the content of an image generated by the OpenAI API. # diff --git a/lib/openai/models/image_create_variation_params.rb b/lib/openai/models/image_create_variation_params.rb index d2e09d8f..763a9b9a 100644 --- a/lib/openai/models/image_create_variation_params.rb +++ b/lib/openai/models/image_create_variation_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#create_variation class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute image @@ -46,7 +45,7 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageCreateVariationParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageCreateVariationParams::Size }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
@@ -54,10 +53,6 @@ class ImageCreateVariationParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(image:, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # @param image [Pathname, StringIO] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] diff --git a/lib/openai/models/image_edit_params.rb b/lib/openai/models/image_edit_params.rb index 247e370b..19525b78 100644 --- a/lib/openai/models/image_edit_params.rb +++ b/lib/openai/models/image_edit_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#edit class ImageEditParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute image @@ -22,7 +21,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String] required :prompt, String - # @!attribute [r] mask + # @!attribute mask # An additional image whose fully transparent areas (e.g. where alpha is zero) # indicate where `image` should be edited. Must be a valid PNG file, less than # 4MB, and have the same dimensions as `image`. @@ -30,10 +29,6 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Pathname, StringIO, nil] optional :mask, OpenAI::Internal::Type::IOLike - # @!parse - # # @return [Pathname, StringIO] - # attr_writer :mask - # @!attribute model # The model to use for image generation. Only `dall-e-2` is supported at this # time. @@ -62,7 +57,7 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageEditParams::Size, nil] optional :size, enum: -> { OpenAI::Models::ImageEditParams::Size }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -70,10 +65,6 @@ class ImageEditParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(image:, prompt:, mask: nil, model: nil, n: nil, response_format: nil, size: nil, user: nil, request_options: {}) # @param image [Pathname, StringIO] # @param prompt [String] diff --git a/lib/openai/models/image_generate_params.rb b/lib/openai/models/image_generate_params.rb index 43b701b6..9e5550f1 100644 --- a/lib/openai/models/image_generate_params.rb +++ b/lib/openai/models/image_generate_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Images#generate class ImageGenerateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute prompt @@ -28,7 +27,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Integer, nil] optional :n, Integer, nil?: true - # @!attribute [r] quality + # @!attribute quality # The quality of the image that will be generated. `hd` creates images with finer # details and greater consistency across the image. This param is only supported # for `dall-e-3`. 
@@ -36,10 +35,6 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality, nil] optional :quality, enum: -> { OpenAI::Models::ImageGenerateParams::Quality } - # @!parse - # # @return [Symbol, OpenAI::Models::ImageGenerateParams::Quality] - # attr_writer :quality - # @!attribute response_format # The format in which the generated images are returned. Must be one of `url` or # `b64_json`. URLs are only valid for 60 minutes after the image has been @@ -65,7 +60,7 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::ImageGenerateParams::Style, nil] optional :style, enum: -> { OpenAI::Models::ImageGenerateParams::Style }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -73,10 +68,6 @@ class ImageGenerateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(prompt:, model: nil, n: nil, quality: nil, response_format: nil, size: nil, style: nil, user: nil, request_options: {}) # @param prompt [String] # @param model [String, Symbol, OpenAI::Models::ImageModel, nil] diff --git a/lib/openai/models/model_delete_params.rb b/lib/openai/models/model_delete_params.rb index f288614b..758a1682 100644 --- a/lib/openai/models/model_delete_params.rb +++ b/lib/openai/models/model_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#delete class ModelDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/model_list_params.rb b/lib/openai/models/model_list_params.rb index 52c1d783..77d83f84 100644 --- a/lib/openai/models/model_list_params.rb +++ b/lib/openai/models/model_list_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#list class ModelListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/model_retrieve_params.rb b/lib/openai/models/model_retrieve_params.rb index c2d43bc2..deec29e7 100644 --- a/lib/openai/models/model_retrieve_params.rb +++ b/lib/openai/models/model_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Models#retrieve class ModelRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/moderation_create_params.rb b/lib/openai/models/moderation_create_params.rb index 6bc57485..85c8aa74 100644 --- a/lib/openai/models/moderation_create_params.rb +++ b/lib/openai/models/moderation_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Moderations#create class 
ModerationCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -15,7 +14,7 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Array, Array] required :input, union: -> { OpenAI::Models::ModerationCreateParams::Input } - # @!attribute [r] model + # @!attribute model # The content moderation model you would like to use. Learn more in # [the moderation guide](https://platform.openai.com/docs/guides/moderation), and # learn about available models @@ -24,10 +23,6 @@ class ModerationCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, Symbol, OpenAI::Models::ModerationModel, nil] optional :model, union: -> { OpenAI::Models::ModerationCreateParams::Model } - # @!parse - # # @return [String, Symbol, OpenAI::Models::ModerationModel] - # attr_writer :model - # @!method initialize(input:, model: nil, request_options: {}) # @param input [String, Array, Array] # @param model [String, Symbol, OpenAI::Models::ModerationModel] diff --git a/lib/openai/models/response_format_json_schema.rb b/lib/openai/models/response_format_json_schema.rb index 0e9e47fb..c6b0fdf9 100644 --- a/lib/openai/models/response_format_json_schema.rb +++ b/lib/openai/models/response_format_json_schema.rb @@ -32,28 +32,20 @@ class JSONSchema < OpenAI::Internal::Type::BaseModel # @return [String] required :name, String - # @!attribute [r] description + # @!attribute description # A description of what the response format is for, used by the model to determine # how to respond in the format. # # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - - # @!attribute [r] schema + # @!attribute schema # The schema for the response format, described as a JSON Schema object. Learn how # to build JSON schemas [here](https://json-schema.org/). # # @return [Hash{Symbol=>Object}, nil] optional :schema, OpenAI::Internal::Type::HashOf[OpenAI::Internal::Type::Unknown] - # @!parse - # # @return [Hash{Symbol=>Object}] - # attr_writer :schema - # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` diff --git a/lib/openai/models/responses/easy_input_message.rb b/lib/openai/models/responses/easy_input_message.rb index 9814987c..3dc4202e 100644 --- a/lib/openai/models/responses/easy_input_message.rb +++ b/lib/openai/models/responses/easy_input_message.rb @@ -18,16 +18,12 @@ class EasyInputMessage < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Role] required :role, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Role } - # @!attribute [r] type + # @!attribute type # The type of the message input. Always `message`. # # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type, nil] optional :type, enum: -> { OpenAI::Models::Responses::EasyInputMessage::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::EasyInputMessage::Type] - # attr_writer :type - # @!method initialize(content:, role:, type: nil) # A message input to the model with a role indicating instruction following # hierarchy. 
Instructions given with the `developer` or `system` role take diff --git a/lib/openai/models/responses/file_search_tool.rb b/lib/openai/models/responses/file_search_tool.rb index 8065a25a..85d13196 100644 --- a/lib/openai/models/responses/file_search_tool.rb +++ b/lib/openai/models/responses/file_search_tool.rb @@ -16,37 +16,25 @@ class FileSearchTool < OpenAI::Internal::Type::BaseModel # @return [Array] required :vector_store_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!attribute [r] filters + # @!attribute filters # A filter to apply based on file attributes. # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::Models::Responses::FileSearchTool::Filters } - # @!parse - # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # attr_writer :filters - - # @!attribute [r] max_num_results + # @!attribute max_num_results # The maximum number of results to return. This number should be between 1 and 50 # inclusive. # # @return [Integer, nil] optional :max_num_results, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_num_results - - # @!attribute [r] ranking_options + # @!attribute ranking_options # Ranking options for search. # # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::Responses::FileSearchTool::RankingOptions } - # @!parse - # # @return [OpenAI::Models::Responses::FileSearchTool::RankingOptions] - # attr_writer :ranking_options - # @!method initialize(vector_store_ids:, filters: nil, max_num_results: nil, ranking_options: nil, type: :file_search) # A tool that searches for relevant content from uploaded files. Learn more about # the @@ -76,17 +64,13 @@ module Filters # @see OpenAI::Models::Responses::FileSearchTool#ranking_options class RankingOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] ranker + # @!attribute ranker # The ranker to use for the file search. # # @return [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::FileSearchTool::RankingOptions::Ranker] - # attr_writer :ranker - - # @!attribute [r] score_threshold + # @!attribute score_threshold # The score threshold for the file search, a number between 0 and 1. Numbers # closer to 1 will attempt to return only the most relevant results, but may # return fewer results. @@ -94,10 +78,6 @@ class RankingOptions < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :score_threshold, Float - # @!parse - # # @return [Float] - # attr_writer :score_threshold - # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. 
# diff --git a/lib/openai/models/responses/input_item_list_params.rb b/lib/openai/models/responses/input_item_list_params.rb index eabfd424..d34e8dd7 100644 --- a/lib/openai/models/responses/input_item_list_params.rb +++ b/lib/openai/models/responses/input_item_list_params.rb @@ -5,31 +5,22 @@ module Models module Responses # @see OpenAI::Resources::Responses::InputItems#list class InputItemListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # An item ID to list items after, used in pagination. # # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # An item ID to list items before, used in pagination. # # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] include + # @!attribute include # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # @@ -37,22 +28,14 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } - # @!parse - # # @return [Array] - # attr_writer :include - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # The order to return the input items in. Default is `asc`. # # - `asc`: Return the input items in ascending order. @@ -61,10 +44,6 @@ class InputItemListParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::Responses::InputItemListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::InputItemListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, include: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/responses/response.rb b/lib/openai/models/responses/response.rb index 48793f7b..a51b6317 100644 --- a/lib/openai/models/responses/response.rb +++ b/lib/openai/models/responses/response.rb @@ -181,18 +181,14 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::Response::ServiceTier, nil] optional :service_tier, enum: -> { OpenAI::Models::Responses::Response::ServiceTier }, nil?: true - # @!attribute [r] status + # @!attribute status # The status of the response generation. One of `completed`, `failed`, # `in_progress`, or `incomplete`. # # @return [Symbol, OpenAI::Models::Responses::ResponseStatus, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseStatus } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseStatus] - # attr_writer :status - - # @!attribute [r] text + # @!attribute text # Configuration options for a text response from the model. Can be plain text or # structured JSON data. 
Learn more: # @@ -202,10 +198,6 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } - # @!parse - # # @return [OpenAI::Models::Responses::ResponseTextConfig] - # attr_writer :text - # @!attribute truncation # The truncation strategy to use for the model response. # @@ -218,18 +210,14 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::Response::Truncation, nil] optional :truncation, enum: -> { OpenAI::Models::Responses::Response::Truncation }, nil?: true - # @!attribute [r] usage + # @!attribute usage # Represents token usage details including input tokens, output tokens, a # breakdown of output tokens, and the total tokens used. # # @return [OpenAI::Models::Responses::ResponseUsage, nil] optional :usage, -> { OpenAI::Models::Responses::ResponseUsage } - # @!parse - # # @return [OpenAI::Models::Responses::ResponseUsage] - # attr_writer :usage - - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). @@ -237,10 +225,6 @@ class Response < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(id:, created_at:, error:, incomplete_details:, instructions:, metadata:, model:, output:, parallel_tool_calls:, temperature:, tool_choice:, tools:, top_p:, max_output_tokens: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, status: nil, text: nil, truncation: nil, usage: nil, user: nil, object: :response) # @param id [String] # @param created_at [Float] @@ -268,16 +252,12 @@ class Response < OpenAI::Internal::Type::BaseModel # @see OpenAI::Models::Responses::Response#incomplete_details class IncompleteDetails < OpenAI::Internal::Type::BaseModel - # @!attribute [r] reason + # @!attribute reason # The reason why the response is incomplete. # # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason, nil] optional :reason, enum: -> { OpenAI::Models::Responses::Response::IncompleteDetails::Reason } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::Response::IncompleteDetails::Reason] - # attr_writer :reason - # @!method initialize(reason: nil) # Details about why the response is incomplete. # diff --git a/lib/openai/models/responses/response_computer_tool_call_output_item.rb b/lib/openai/models/responses/response_computer_tool_call_output_item.rb index 2a10286a..9bfc14e2 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_item.rb @@ -28,7 +28,7 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, :computer_call_output] required :type, const: :computer_call_output - # @!attribute [r] acknowledged_safety_checks + # @!attribute acknowledged_safety_checks # The safety checks reported by the API that have been acknowledged by the # developer. 
# @@ -36,21 +36,13 @@ class ResponseComputerToolCallOutputItem < OpenAI::Internal::Type::BaseModel optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::AcknowledgedSafetyCheck] } - # @!parse - # # @return [Array] - # attr_writer :acknowledged_safety_checks - - # @!attribute [r] status + # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseComputerToolCallOutputItem::Status] - # attr_writer :status - # @!method initialize(id:, call_id:, output:, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # @param id [String] # @param call_id [String] diff --git a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb index e65f4fc5..9dc1550c 100644 --- a/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb +++ b/lib/openai/models/responses/response_computer_tool_call_output_screenshot.rb @@ -11,26 +11,18 @@ class ResponseComputerToolCallOutputScreenshot < OpenAI::Internal::Type::BaseMod # @return [Symbol, :computer_screenshot] required :type, const: :computer_screenshot - # @!attribute [r] file_id + # @!attribute file_id # The identifier of an uploaded file that contains the screenshot. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] image_url + # @!attribute image_url # The URL of the screenshot image. # # @return [String, nil] optional :image_url, String - # @!parse - # # @return [String] - # attr_writer :image_url - # @!method initialize(file_id: nil, image_url: nil, type: :computer_screenshot) # A computer screenshot image used with the computer use tool. # diff --git a/lib/openai/models/responses/response_create_params.rb b/lib/openai/models/responses/response_create_params.rb index 3eeb57fc..f20f9cc0 100644 --- a/lib/openai/models/responses/response_create_params.rb +++ b/lib/openai/models/responses/response_create_params.rb @@ -7,8 +7,7 @@ module Responses # # @see OpenAI::Resources::Responses#stream_raw class ResponseCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute input @@ -142,7 +141,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Float, nil] optional :temperature, Float, nil?: true - # @!attribute [r] text + # @!attribute text # Configuration options for a text response from the model. Can be plain text or # structured JSON data. 
Learn more: # @@ -152,11 +151,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::Responses::ResponseTextConfig, nil] optional :text, -> { OpenAI::Models::Responses::ResponseTextConfig } - # @!parse - # # @return [OpenAI::Models::Responses::ResponseTextConfig] - # attr_writer :text - - # @!attribute [r] tool_choice + # @!attribute tool_choice # How the model should select which tool (or tools) to use when generating a # response. See the `tools` parameter to see how to specify which tools the model # can call. @@ -164,11 +159,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction, nil] optional :tool_choice, union: -> { OpenAI::Models::Responses::ResponseCreateParams::ToolChoice } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ToolChoiceOptions, OpenAI::Models::Responses::ToolChoiceTypes, OpenAI::Models::Responses::ToolChoiceFunction] - # attr_writer :tool_choice - - # @!attribute [r] tools + # @!attribute tools # An array of tools the model may call while generating a response. You can # specify which tool to use by setting the `tool_choice` parameter. # @@ -187,10 +178,6 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :tools, -> { OpenAI::Internal::Type::ArrayOf[union: OpenAI::Models::Responses::Tool] } - # @!parse - # # @return [Array] - # attr_writer :tools - # @!attribute top_p # An alternative to sampling with temperature, called nucleus sampling, where the # model considers the results of the tokens with top_p probability mass. So 0.1 @@ -213,7 +200,7 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseCreateParams::Truncation, nil] optional :truncation, enum: -> { OpenAI::Models::Responses::ResponseCreateParams::Truncation }, nil?: true - # @!attribute [r] user + # @!attribute user # A unique identifier representing your end-user, which can help OpenAI to monitor # and detect abuse. # [Learn more](https://platform.openai.com/docs/guides/safety-best-practices#end-user-ids). 
@@ -221,10 +208,6 @@ class ResponseCreateParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :user, String - # @!parse - # # @return [String] - # attr_writer :user - # @!method initialize(input:, model:, include: nil, instructions: nil, max_output_tokens: nil, metadata: nil, parallel_tool_calls: nil, previous_response_id: nil, reasoning: nil, service_tier: nil, store: nil, temperature: nil, text: nil, tool_choice: nil, tools: nil, top_p: nil, truncation: nil, user: nil, request_options: {}) # @param input [String, Array] # @param model [String, Symbol, OpenAI::Models::ChatModel, OpenAI::Models::ResponsesModel::ResponsesOnlyModel] diff --git a/lib/openai/models/responses/response_delete_params.rb b/lib/openai/models/responses/response_delete_params.rb index e3d6735f..96a2b404 100644 --- a/lib/openai/models/responses/response_delete_params.rb +++ b/lib/openai/models/responses/response_delete_params.rb @@ -5,8 +5,7 @@ module Models module Responses # @see OpenAI::Resources::Responses#delete class ResponseDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/responses/response_file_search_tool_call.rb b/lib/openai/models/responses/response_file_search_tool_call.rb index 43a865fd..9fc12343 100644 --- a/lib/openai/models/responses/response_file_search_tool_call.rb +++ b/lib/openai/models/responses/response_file_search_tool_call.rb @@ -78,46 +78,30 @@ class Result < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::Responses::ResponseFileSearchToolCall::Result::Attribute] }, nil?: true - # @!attribute [r] file_id + # @!attribute file_id # The unique ID of the file. # # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] filename + # @!attribute filename # The name of the file. # # @return [String, nil] optional :filename, String - # @!parse - # # @return [String] - # attr_writer :filename - - # @!attribute [r] score + # @!attribute score # The relevance score of the file - a value between 0 and 1. # # @return [Float, nil] optional :score, Float - # @!parse - # # @return [Float] - # attr_writer :score - - # @!attribute [r] text + # @!attribute text # The text that was retrieved from the file. # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - # @!method initialize(attributes: nil, file_id: nil, filename: nil, score: nil, text: nil) # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] # @param file_id [String] diff --git a/lib/openai/models/responses/response_format_text_json_schema_config.rb b/lib/openai/models/responses/response_format_text_json_schema_config.rb index c6271a6a..106627ff 100644 --- a/lib/openai/models/responses/response_format_text_json_schema_config.rb +++ b/lib/openai/models/responses/response_format_text_json_schema_config.rb @@ -24,17 +24,13 @@ class ResponseFormatTextJSONSchemaConfig < OpenAI::Internal::Type::BaseModel # @return [Symbol, :json_schema] required :type, const: :json_schema - # @!attribute [r] description + # @!attribute description # A description of what the response format is for, used by the model to determine # how to respond in the format. 
# # @return [String, nil] optional :description, String - # @!parse - # # @return [String] - # attr_writer :description - # @!attribute strict # Whether to enable strict schema adherence when generating the output. If set to # true, the model will always follow the exact schema defined in the `schema` diff --git a/lib/openai/models/responses/response_function_tool_call.rb b/lib/openai/models/responses/response_function_tool_call.rb index 25210a43..7db552df 100644 --- a/lib/openai/models/responses/response_function_tool_call.rb +++ b/lib/openai/models/responses/response_function_tool_call.rb @@ -28,27 +28,19 @@ class ResponseFunctionToolCall < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function_call] required :type, const: :function_call - # @!attribute [r] id + # @!attribute id # The unique ID of the function tool call. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCall::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCall::Status] - # attr_writer :status - # @!method initialize(arguments:, call_id:, name:, id: nil, status: nil, type: :function_call) # A tool call to run a function. See the # [function calling guide](https://platform.openai.com/docs/guides/function-calling) diff --git a/lib/openai/models/responses/response_function_tool_call_output_item.rb b/lib/openai/models/responses/response_function_tool_call_output_item.rb index 6eb9b8a9..6623a73c 100644 --- a/lib/openai/models/responses/response_function_tool_call_output_item.rb +++ b/lib/openai/models/responses/response_function_tool_call_output_item.rb @@ -28,17 +28,13 @@ class ResponseFunctionToolCallOutputItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function_call_output] required :type, const: :function_call_output - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseFunctionToolCallOutputItem::Status] - # attr_writer :status - # @!method initialize(id:, call_id:, output:, status: nil, type: :function_call_output) # @param id [String] # @param call_id [String] diff --git a/lib/openai/models/responses/response_input_file.rb b/lib/openai/models/responses/response_input_file.rb index 4b186be4..13aeefd9 100644 --- a/lib/openai/models/responses/response_input_file.rb +++ b/lib/openai/models/responses/response_input_file.rb @@ -10,36 +10,24 @@ class ResponseInputFile < OpenAI::Internal::Type::BaseModel # @return [Symbol, :input_file] required :type, const: :input_file - # @!attribute [r] file_data + # @!attribute file_data # The content of the file to be sent to the model. # # @return [String, nil] optional :file_data, String - # @!parse - # # @return [String] - # attr_writer :file_data - - # @!attribute [r] file_id + # @!attribute file_id # The ID of the file to be sent to the model. 
# # @return [String, nil] optional :file_id, String - # @!parse - # # @return [String] - # attr_writer :file_id - - # @!attribute [r] filename + # @!attribute filename # The name of the file to be sent to the model. # # @return [String, nil] optional :filename, String - # @!parse - # # @return [String] - # attr_writer :filename - # @!method initialize(file_data: nil, file_id: nil, filename: nil, type: :input_file) # A file input to the model. # diff --git a/lib/openai/models/responses/response_input_item.rb b/lib/openai/models/responses/response_input_item.rb index d70b58bc..72801ab7 100644 --- a/lib/openai/models/responses/response_input_item.rb +++ b/lib/openai/models/responses/response_input_item.rb @@ -72,27 +72,19 @@ class Message < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Role] required :role, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Role } - # @!attribute [r] status + # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Status] - # attr_writer :status - - # @!attribute [r] type + # @!attribute type # The type of the message input. Always set to `message`. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type, nil] optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputItem::Message::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::Message::Type] - # attr_writer :type - # @!method initialize(content:, role:, status: nil, type: nil) # A message input to the model with a role indicating instruction following # hierarchy. Instructions given with the `developer` or `system` role take @@ -164,17 +156,13 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :computer_call_output] required :type, const: :computer_call_output - # @!attribute [r] id + # @!attribute id # The ID of the computer tool call output. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] acknowledged_safety_checks + # @!attribute acknowledged_safety_checks # The safety checks reported by the API that have been acknowledged by the # developer. # @@ -182,21 +170,13 @@ class ComputerCallOutput < OpenAI::Internal::Type::BaseModel optional :acknowledged_safety_checks, -> { OpenAI::Internal::Type::ArrayOf[OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::AcknowledgedSafetyCheck] } - # @!parse - # # @return [Array] - # attr_writer :acknowledged_safety_checks - - # @!attribute [r] status + # @!attribute status # The status of the message input. One of `in_progress`, `completed`, or # `incomplete`. Populated when input items are returned via API. 
# # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::ComputerCallOutput::Status] - # attr_writer :status - # @!method initialize(call_id:, output:, id: nil, acknowledged_safety_checks: nil, status: nil, type: :computer_call_output) # The output of a computer tool call. # @@ -269,28 +249,20 @@ class FunctionCallOutput < OpenAI::Internal::Type::BaseModel # @return [Symbol, :function_call_output] required :type, const: :function_call_output - # @!attribute [r] id + # @!attribute id # The unique ID of the function tool call output. Populated when this item is # returned via API. # # @return [String, nil] optional :id, String - # @!parse - # # @return [String] - # attr_writer :id - - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputItem::FunctionCallOutput::Status] - # attr_writer :status - # @!method initialize(call_id:, output:, id: nil, status: nil, type: :function_call_output) # The output of a function tool call. # diff --git a/lib/openai/models/responses/response_input_message_item.rb b/lib/openai/models/responses/response_input_message_item.rb index 2c3f05d2..2b7b1ab1 100644 --- a/lib/openai/models/responses/response_input_message_item.rb +++ b/lib/openai/models/responses/response_input_message_item.rb @@ -24,27 +24,19 @@ class ResponseInputMessageItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Role] required :role, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Role } - # @!attribute [r] status + # @!attribute status # The status of item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Status] - # attr_writer :status - - # @!attribute [r] type + # @!attribute type # The type of the message input. Always set to `message`. 
# # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type, nil] optional :type, enum: -> { OpenAI::Models::Responses::ResponseInputMessageItem::Type } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseInputMessageItem::Type] - # attr_writer :type - # @!method initialize(id:, content:, role:, status: nil, type: nil) # @param id [String] # @param content [Array] diff --git a/lib/openai/models/responses/response_reasoning_item.rb b/lib/openai/models/responses/response_reasoning_item.rb index fb8d5db6..bec6c280 100644 --- a/lib/openai/models/responses/response_reasoning_item.rb +++ b/lib/openai/models/responses/response_reasoning_item.rb @@ -23,17 +23,13 @@ class ResponseReasoningItem < OpenAI::Internal::Type::BaseModel # @return [Symbol, :reasoning] required :type, const: :reasoning - # @!attribute [r] status + # @!attribute status # The status of the item. One of `in_progress`, `completed`, or `incomplete`. # Populated when items are returned via API. # # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status, nil] optional :status, enum: -> { OpenAI::Models::Responses::ResponseReasoningItem::Status } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::ResponseReasoningItem::Status] - # attr_writer :status - # @!method initialize(id:, summary:, status: nil, type: :reasoning) # A description of the chain of thought used by a reasoning model while generating # a response. diff --git a/lib/openai/models/responses/response_retrieve_params.rb b/lib/openai/models/responses/response_retrieve_params.rb index 1b64f738..bb881916 100644 --- a/lib/openai/models/responses/response_retrieve_params.rb +++ b/lib/openai/models/responses/response_retrieve_params.rb @@ -5,11 +5,10 @@ module Models module Responses # @see OpenAI::Resources::Responses#retrieve class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] include + # @!attribute include # Additional fields to include in the response. See the `include` parameter for # Response creation above for more information. # @@ -17,10 +16,6 @@ class ResponseRetrieveParams < OpenAI::Internal::Type::BaseModel optional :include, -> { OpenAI::Internal::Type::ArrayOf[enum: OpenAI::Models::Responses::ResponseIncludable] } - # @!parse - # # @return [Array] - # attr_writer :include - # @!method initialize(include: nil, request_options: {}) # @param include [Array] # @param request_options [OpenAI::RequestOptions, Hash{Symbol=>Object}] diff --git a/lib/openai/models/responses/response_text_config.rb b/lib/openai/models/responses/response_text_config.rb index cd0bf58f..05e2d3db 100644 --- a/lib/openai/models/responses/response_text_config.rb +++ b/lib/openai/models/responses/response_text_config.rb @@ -4,7 +4,7 @@ module OpenAI module Models module Responses class ResponseTextConfig < OpenAI::Internal::Type::BaseModel - # @!attribute [r] format_ + # @!attribute format_ # An object specifying the format that the model must output. 
# # Configuring `{ "type": "json_schema" }` enables Structured Outputs, which @@ -22,10 +22,6 @@ class ResponseTextConfig < OpenAI::Internal::Type::BaseModel # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject, nil] optional :format_, union: -> { OpenAI::Models::Responses::ResponseFormatTextConfig }, api_name: :format - # @!parse - # # @return [OpenAI::Models::ResponseFormatText, OpenAI::Models::Responses::ResponseFormatTextJSONSchemaConfig, OpenAI::Models::ResponseFormatJSONObject] - # attr_writer :format_ - # @!method initialize(format_: nil) # Configuration options for a text response from the model. Can be plain text or # structured JSON data. Learn more: diff --git a/lib/openai/models/responses/web_search_tool.rb b/lib/openai/models/responses/web_search_tool.rb index 3ed57a22..09522f82 100644 --- a/lib/openai/models/responses/web_search_tool.rb +++ b/lib/openai/models/responses/web_search_tool.rb @@ -13,17 +13,13 @@ class WebSearchTool < OpenAI::Internal::Type::BaseModel # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::Type] required :type, enum: -> { OpenAI::Models::Responses::WebSearchTool::Type } - # @!attribute [r] search_context_size + # @!attribute search_context_size # High level guidance for the amount of context window space to use for the # search. One of `low`, `medium`, or `high`. `medium` is the default. # # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize, nil] optional :search_context_size, enum: -> { OpenAI::Models::Responses::WebSearchTool::SearchContextSize } - # @!parse - # # @return [Symbol, OpenAI::Models::Responses::WebSearchTool::SearchContextSize] - # attr_writer :search_context_size - # @!attribute user_location # # @return [OpenAI::Models::Responses::WebSearchTool::UserLocation, nil] @@ -77,48 +73,32 @@ class UserLocation < OpenAI::Internal::Type::BaseModel # @return [Symbol, :approximate] required :type, const: :approximate - # @!attribute [r] city + # @!attribute city # Free text input for the city of the user, e.g. `San Francisco`. # # @return [String, nil] optional :city, String - # @!parse - # # @return [String] - # attr_writer :city - - # @!attribute [r] country + # @!attribute country # The two-letter [ISO country code](https://en.wikipedia.org/wiki/ISO_3166-1) of # the user, e.g. `US`. # # @return [String, nil] optional :country, String - # @!parse - # # @return [String] - # attr_writer :country - - # @!attribute [r] region + # @!attribute region # Free text input for the region of the user, e.g. `California`. # # @return [String, nil] optional :region, String - # @!parse - # # @return [String] - # attr_writer :region - - # @!attribute [r] timezone + # @!attribute timezone # The [IANA timezone](https://timeapi.io/documentation/iana-timezones) of the # user, e.g. `America/Los_Angeles`. 
# # @return [String, nil] optional :timezone, String - # @!parse - # # @return [String] - # attr_writer :timezone - # @!method initialize(city: nil, country: nil, region: nil, timezone: nil, type: :approximate) # @param city [String] # @param country [String] diff --git a/lib/openai/models/upload_cancel_params.rb b/lib/openai/models/upload_cancel_params.rb index 1b7164e8..717800ca 100644 --- a/lib/openai/models/upload_cancel_params.rb +++ b/lib/openai/models/upload_cancel_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#cancel class UploadCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/upload_complete_params.rb b/lib/openai/models/upload_complete_params.rb index 77f01df1..b978c7a2 100644 --- a/lib/openai/models/upload_complete_params.rb +++ b/lib/openai/models/upload_complete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#complete class UploadCompleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute part_ids @@ -14,17 +13,13 @@ class UploadCompleteParams < OpenAI::Internal::Type::BaseModel # @return [Array] required :part_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!attribute [r] md5 + # @!attribute md5 # The optional md5 checksum for the file contents to verify if the bytes uploaded # matches what you expect. # # @return [String, nil] optional :md5, String - # @!parse - # # @return [String] - # attr_writer :md5 - # @!method initialize(part_ids:, md5: nil, request_options: {}) # @param part_ids [Array] # @param md5 [String] diff --git a/lib/openai/models/upload_create_params.rb b/lib/openai/models/upload_create_params.rb index 0c7d54d5..afa6ec61 100644 --- a/lib/openai/models/upload_create_params.rb +++ b/lib/openai/models/upload_create_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::Uploads#create class UploadCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute bytes diff --git a/lib/openai/models/uploads/part_create_params.rb b/lib/openai/models/uploads/part_create_params.rb index 1e11840f..9294e4c0 100644 --- a/lib/openai/models/uploads/part_create_params.rb +++ b/lib/openai/models/uploads/part_create_params.rb @@ -5,8 +5,7 @@ module Models module Uploads # @see OpenAI::Resources::Uploads::Parts#create class PartCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute data diff --git a/lib/openai/models/vector_store.rb b/lib/openai/models/vector_store.rb index 5ad255fe..3db76e67 100644 --- a/lib/openai/models/vector_store.rb +++ b/lib/openai/models/vector_store.rb @@ -64,16 +64,12 @@ class VectorStore < OpenAI::Internal::Type::BaseModel # @return [Integer] required :usage_bytes, Integer - # @!attribute [r] expires_after + # @!attribute 
expires_after # The expiration policy for a vector store. # # @return [OpenAI::Models::VectorStore::ExpiresAfter, nil] optional :expires_after, -> { OpenAI::Models::VectorStore::ExpiresAfter } - # @!parse - # # @return [OpenAI::Models::VectorStore::ExpiresAfter] - # attr_writer :expires_after - # @!attribute expires_at # The Unix timestamp (in seconds) for when the vector store will expire. # diff --git a/lib/openai/models/vector_store_create_params.rb b/lib/openai/models/vector_store_create_params.rb index 2899d54e..30eb84d9 100644 --- a/lib/openai/models/vector_store_create_params.rb +++ b/lib/openai/models/vector_store_create_params.rb @@ -4,32 +4,23 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#create class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } - # @!parse - # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # attr_writer :chunking_strategy - - # @!attribute [r] expires_after + # @!attribute expires_after # The expiration policy for a vector store. # # @return [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter, nil] optional :expires_after, -> { OpenAI::Models::VectorStoreCreateParams::ExpiresAfter } - # @!parse - # # @return [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] - # attr_writer :expires_after - - # @!attribute [r] file_ids + # @!attribute file_ids # A list of [File](https://platform.openai.com/docs/api-reference/files) IDs that # the vector store should use. Useful for tools like `file_search` that can access # files. @@ -37,10 +28,6 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @return [Array, nil] optional :file_ids, OpenAI::Internal::Type::ArrayOf[String] - # @!parse - # # @return [Array] - # attr_writer :file_ids - # @!attribute metadata # Set of 16 key-value pairs that can be attached to an object. This can be useful # for storing additional information about the object in a structured format, and @@ -52,16 +39,12 @@ class VectorStoreCreateParams < OpenAI::Internal::Type::BaseModel # @return [Hash{Symbol=>String}, nil] optional :metadata, OpenAI::Internal::Type::HashOf[String], nil?: true - # @!attribute [r] name + # @!attribute name # The name of the vector store. 
# # @return [String, nil] optional :name, String - # @!parse - # # @return [String] - # attr_writer :name - # @!method initialize(chunking_strategy: nil, expires_after: nil, file_ids: nil, metadata: nil, name: nil, request_options: {}) # @param chunking_strategy [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] # @param expires_after [OpenAI::Models::VectorStoreCreateParams::ExpiresAfter] diff --git a/lib/openai/models/vector_store_delete_params.rb b/lib/openai/models/vector_store_delete_params.rb index e307e25d..11a788e6 100644 --- a/lib/openai/models/vector_store_delete_params.rb +++ b/lib/openai/models/vector_store_delete_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#delete class VectorStoreDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/vector_store_list_params.rb b/lib/openai/models/vector_store_list_params.rb index 610dd889..caa16c07 100644 --- a/lib/openai/models/vector_store_list_params.rb +++ b/lib/openai/models/vector_store_list_params.rb @@ -4,11 +4,10 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#list class VectorStoreListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -17,11 +16,7 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -30,32 +25,20 @@ class VectorStoreListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStoreListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStoreListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/vector_store_retrieve_params.rb b/lib/openai/models/vector_store_retrieve_params.rb index aaf50986..004d1047 100644 --- a/lib/openai/models/vector_store_retrieve_params.rb +++ b/lib/openai/models/vector_store_retrieve_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#retrieve class VectorStoreRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!method initialize(request_options: {}) diff --git a/lib/openai/models/vector_store_search_params.rb b/lib/openai/models/vector_store_search_params.rb index 9807ed6a..808daadb 100644 --- a/lib/openai/models/vector_store_search_params.rb +++ b/lib/openai/models/vector_store_search_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#search class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute query @@ -14,47 +13,31 @@ class VectorStoreSearchParams < OpenAI::Internal::Type::BaseModel # @return [String, Array] required :query, union: -> { OpenAI::Models::VectorStoreSearchParams::Query } - # @!attribute [r] filters + # @!attribute filters # A filter to apply based on file attributes. # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter, nil] optional :filters, union: -> { OpenAI::Models::VectorStoreSearchParams::Filters } - # @!parse - # # @return [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] - # attr_writer :filters - - # @!attribute [r] max_num_results + # @!attribute max_num_results # The maximum number of results to return. This number should be between 1 and 50 # inclusive. # # @return [Integer, nil] optional :max_num_results, Integer - # @!parse - # # @return [Integer] - # attr_writer :max_num_results - - # @!attribute [r] ranking_options + # @!attribute ranking_options # Ranking options for search. # # @return [OpenAI::Models::VectorStoreSearchParams::RankingOptions, nil] optional :ranking_options, -> { OpenAI::Models::VectorStoreSearchParams::RankingOptions } - # @!parse - # # @return [OpenAI::Models::VectorStoreSearchParams::RankingOptions] - # attr_writer :ranking_options - - # @!attribute [r] rewrite_query + # @!attribute rewrite_query # Whether to rewrite the natural language query for vector search. 
# # @return [Boolean, nil] optional :rewrite_query, OpenAI::Internal::Type::Boolean - # @!parse - # # @return [Boolean] - # attr_writer :rewrite_query - # @!method initialize(query:, filters: nil, max_num_results: nil, ranking_options: nil, rewrite_query: nil, request_options: {}) # @param query [String, Array] # @param filters [OpenAI::Models::ComparisonFilter, OpenAI::Models::CompoundFilter] @@ -92,24 +75,16 @@ module Filters end class RankingOptions < OpenAI::Internal::Type::BaseModel - # @!attribute [r] ranker + # @!attribute ranker # # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker, nil] optional :ranker, enum: -> { OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStoreSearchParams::RankingOptions::Ranker] - # attr_writer :ranker - - # @!attribute [r] score_threshold + # @!attribute score_threshold # # @return [Float, nil] optional :score_threshold, Float - # @!parse - # # @return [Float] - # attr_writer :score_threshold - # @!method initialize(ranker: nil, score_threshold: nil) # Ranking options for search. # diff --git a/lib/openai/models/vector_store_update_params.rb b/lib/openai/models/vector_store_update_params.rb index 9bda9d94..91eafa78 100644 --- a/lib/openai/models/vector_store_update_params.rb +++ b/lib/openai/models/vector_store_update_params.rb @@ -4,8 +4,7 @@ module OpenAI module Models # @see OpenAI::Resources::VectorStores#update class VectorStoreUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute expires_after diff --git a/lib/openai/models/vector_stores/file_batch_cancel_params.rb b/lib/openai/models/vector_stores/file_batch_cancel_params.rb index c79d293e..c6f2f182 100644 --- a/lib/openai/models/vector_stores/file_batch_cancel_params.rb +++ b/lib/openai/models/vector_stores/file_batch_cancel_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#cancel class FileBatchCancelParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_batch_create_params.rb b/lib/openai/models/vector_stores/file_batch_create_params.rb index f0517a4c..9a873239 100644 --- a/lib/openai/models/vector_stores/file_batch_create_params.rb +++ b/lib/openai/models/vector_stores/file_batch_create_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#create class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file_ids @@ -29,17 +28,13 @@ class FileBatchCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::FileBatchCreateParams::Attribute] }, nil?: true - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. 
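For orientation, a hedged sketch of how `chunking_strategy` might be supplied when creating a file batch. The static-strategy field names follow the public OpenAI API shape, the IDs are placeholders, and the vector store ID is assumed to be positional per the generated resource method; none of this comes from the hunk itself:

```ruby
openai.vector_stores.file_batches.create(
  "vs_123",                      # placeholder vector store ID
  file_ids: ["file-abc123"],     # placeholder file ID
  chunking_strategy: {
    type: :static,
    static: {max_chunk_size_tokens: 800, chunk_overlap_tokens: 400}
  }
)
```

Omitting `chunking_strategy` falls back to the `auto` strategy, as the attribute comment above notes.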
# # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } - # @!parse - # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # attr_writer :chunking_strategy - # @!method initialize(file_ids:, attributes: nil, chunking_strategy: nil, request_options: {}) # @param file_ids [Array] # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] diff --git a/lib/openai/models/vector_stores/file_batch_list_files_params.rb b/lib/openai/models/vector_stores/file_batch_list_files_params.rb index a99326c3..543b49c7 100644 --- a/lib/openai/models/vector_stores/file_batch_list_files_params.rb +++ b/lib/openai/models/vector_stores/file_batch_list_files_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#list_files class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id @@ -14,7 +13,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @return [String] required :vector_store_id, String - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -23,11 +22,7 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -36,42 +31,26 @@ class FileBatchListFilesParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] filter + # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter, nil] optional :filter, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Filter] - # attr_writer :filter - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. 
# # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStores::FileBatchListFilesParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileBatchListFilesParams::Order] - # attr_writer :order - # @!method initialize(vector_store_id:, after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) # @param vector_store_id [String] # @param after [String] diff --git a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb index 48cf6115..3ec39a5b 100644 --- a/lib/openai/models/vector_stores/file_batch_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_batch_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::FileBatches#retrieve class FileBatchRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_content_params.rb b/lib/openai/models/vector_stores/file_content_params.rb index e4f3deec..0dbc1139 100644 --- a/lib/openai/models/vector_stores/file_content_params.rb +++ b/lib/openai/models/vector_stores/file_content_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#content class FileContentParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_content_response.rb b/lib/openai/models/vector_stores/file_content_response.rb index b4924ae1..094dcbfb 100644 --- a/lib/openai/models/vector_stores/file_content_response.rb +++ b/lib/openai/models/vector_stores/file_content_response.rb @@ -5,26 +5,18 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#content class FileContentResponse < OpenAI::Internal::Type::BaseModel - # @!attribute [r] text + # @!attribute text # The text content # # @return [String, nil] optional :text, String - # @!parse - # # @return [String] - # attr_writer :text - - # @!attribute [r] type + # @!attribute type # The content type (currently only `"text"`) # # @return [String, nil] optional :type, String - # @!parse - # # @return [String] - # attr_writer :type - # @!method initialize(text: nil, type: nil) # @param text [String] # @param type [String] diff --git a/lib/openai/models/vector_stores/file_create_params.rb b/lib/openai/models/vector_stores/file_create_params.rb index d8307a70..2d0fc3ea 100644 --- a/lib/openai/models/vector_stores/file_create_params.rb +++ b/lib/openai/models/vector_stores/file_create_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#create class FileCreateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute file_id @@ -29,17 +28,13 @@ class FileCreateParams < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: 
OpenAI::Models::VectorStores::FileCreateParams::Attribute] }, nil?: true - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The chunking strategy used to chunk the file(s). If not set, will use the `auto` # strategy. Only applicable if `file_ids` is non-empty. # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategyParam } - # @!parse - # # @return [OpenAI::Models::AutoFileChunkingStrategyParam, OpenAI::Models::StaticFileChunkingStrategyObjectParam] - # attr_writer :chunking_strategy - # @!method initialize(file_id:, attributes: nil, chunking_strategy: nil, request_options: {}) # @param file_id [String] # @param attributes [Hash{Symbol=>String, Float, Boolean}, nil] diff --git a/lib/openai/models/vector_stores/file_delete_params.rb b/lib/openai/models/vector_stores/file_delete_params.rb index 25a5fbcc..ef1c9179 100644 --- a/lib/openai/models/vector_stores/file_delete_params.rb +++ b/lib/openai/models/vector_stores/file_delete_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#delete class FileDeleteParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_list_params.rb b/lib/openai/models/vector_stores/file_list_params.rb index 0c80decc..551d5a17 100644 --- a/lib/openai/models/vector_stores/file_list_params.rb +++ b/lib/openai/models/vector_stores/file_list_params.rb @@ -5,11 +5,10 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#list class FileListParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters - # @!attribute [r] after + # @!attribute after # A cursor for use in pagination. `after` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # ending with obj_foo, your subsequent call can include after=obj_foo in order to @@ -18,11 +17,7 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :after, String - # @!parse - # # @return [String] - # attr_writer :after - - # @!attribute [r] before + # @!attribute before # A cursor for use in pagination. `before` is an object ID that defines your place # in the list. For instance, if you make a list request and receive 100 objects, # starting with obj_foo, your subsequent call can include before=obj_foo in order @@ -31,42 +26,26 @@ class FileListParams < OpenAI::Internal::Type::BaseModel # @return [String, nil] optional :before, String - # @!parse - # # @return [String] - # attr_writer :before - - # @!attribute [r] filter + # @!attribute filter # Filter by file status. One of `in_progress`, `completed`, `failed`, `cancelled`. 
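As a quick illustration (the vector store ID is a placeholder; the filter values are exactly the enum members listed above):

```ruby
# Sketch: list only files that finished processing.
openai.vector_stores.files.list("vs_123", filter: :completed, limit: 50)
```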
# # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter, nil] optional :filter, enum: -> { OpenAI::Models::VectorStores::FileListParams::Filter } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Filter] - # attr_writer :filter - - # @!attribute [r] limit + # @!attribute limit # A limit on the number of objects to be returned. Limit can range between 1 and # 100, and the default is 20. # # @return [Integer, nil] optional :limit, Integer - # @!parse - # # @return [Integer] - # attr_writer :limit - - # @!attribute [r] order + # @!attribute order # Sort order by the `created_at` timestamp of the objects. `asc` for ascending # order and `desc` for descending order. # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order, nil] optional :order, enum: -> { OpenAI::Models::VectorStores::FileListParams::Order } - # @!parse - # # @return [Symbol, OpenAI::Models::VectorStores::FileListParams::Order] - # attr_writer :order - # @!method initialize(after: nil, before: nil, filter: nil, limit: nil, order: nil, request_options: {}) # @param after [String] # @param before [String] diff --git a/lib/openai/models/vector_stores/file_retrieve_params.rb b/lib/openai/models/vector_stores/file_retrieve_params.rb index 2b63ee84..3a301cdb 100644 --- a/lib/openai/models/vector_stores/file_retrieve_params.rb +++ b/lib/openai/models/vector_stores/file_retrieve_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#retrieve class FileRetrieveParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/file_update_params.rb b/lib/openai/models/vector_stores/file_update_params.rb index 9cf5a31a..f054fa7a 100644 --- a/lib/openai/models/vector_stores/file_update_params.rb +++ b/lib/openai/models/vector_stores/file_update_params.rb @@ -5,8 +5,7 @@ module Models module VectorStores # @see OpenAI::Resources::VectorStores::Files#update class FileUpdateParams < OpenAI::Internal::Type::BaseModel - # @!parse - # extend OpenAI::Internal::Type::RequestParameters::Converter + extend OpenAI::Internal::Type::RequestParameters::Converter include OpenAI::Internal::Type::RequestParameters # @!attribute vector_store_id diff --git a/lib/openai/models/vector_stores/vector_store_file.rb b/lib/openai/models/vector_stores/vector_store_file.rb index ae51122a..4891ed5a 100644 --- a/lib/openai/models/vector_stores/vector_store_file.rb +++ b/lib/openai/models/vector_stores/vector_store_file.rb @@ -66,16 +66,12 @@ class VectorStoreFile < OpenAI::Internal::Type::BaseModel -> { OpenAI::Internal::Type::HashOf[union: OpenAI::Models::VectorStores::VectorStoreFile::Attribute] }, nil?: true - # @!attribute [r] chunking_strategy + # @!attribute chunking_strategy # The strategy used to chunk the file. 
# # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject, nil] optional :chunking_strategy, union: -> { OpenAI::Models::FileChunkingStrategy } - # @!parse - # # @return [OpenAI::Models::StaticFileChunkingStrategyObject, OpenAI::Models::OtherFileChunkingStrategyObject] - # attr_writer :chunking_strategy - # @!method initialize(id:, created_at:, last_error:, status:, usage_bytes:, vector_store_id:, attributes: nil, chunking_strategy: nil, object: :"vector_store.file") # A list of files attached to a vector store. # From ff43d73fe6e514d5b9f110892c3880998df8dacf Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 20:09:22 +0000 Subject: [PATCH 11/15] chore: documentation improvements --- README.md | 157 +++++++++++++++++++++++++------------------------- Rakefile | 4 +- lib/openai.rb | 9 --- 3 files changed, 81 insertions(+), 89 deletions(-) diff --git a/README.md b/README.md index 225a0783..d5936411 100644 --- a/README.md +++ b/README.md @@ -31,16 +31,26 @@ openai = OpenAI::Client.new( ) chat_completion = openai.chat.completions.create( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" + messages: [{role: :user, content: "Say this is a test"}], + model: :"gpt-4.1" ) puts(chat_completion) ``` +## Sorbet + +This library is written with [Sorbet type definitions](https://sorbet.org/docs/rbi). However, there is no runtime dependency on the `sorbet-runtime`. + +When using sorbet, it is recommended to use model classes as below. This provides stronger type checking and tooling integration. + +```ruby +openai.chat.completions.create( + messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")], + model: :"gpt-4.1" +) +``` + ### Pagination List methods in the OpenAI API are paginated. @@ -68,11 +78,8 @@ We provide support for streaming responses using Server-Sent Events (SSE). ```ruby stream = openai.chat.completions.stream_raw( - messages: [{ - role: "user", - content: "Say this is a test" - }], - model: "gpt-4o" + messages: [{role: :user, content: "Say this is a test"}], + model: :"gpt-4.1" ) stream.each do |completion| @@ -88,11 +95,11 @@ Request parameters that correspond to file uploads can be passed as `StringIO`, require "pathname" # using `Pathname`, the file will be lazily read, without reading everything in to memory -file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: "fine-tune") +file_object = openai.files.create(file: Pathname("input.jsonl"), purpose: :"fine-tune") file = File.read("input.jsonl") # using `StringIO`, useful if you already have the data in memory -file_object = openai.files.create(file: StringIO.new(file), purpose: "fine-tune") +file_object = openai.files.create(file: StringIO.new(file), purpose: :"fine-tune") puts(file_object.id) ``` @@ -103,7 +110,7 @@ When the library is unable to connect to the API, or if the API returns a non-su ```ruby begin - job = openai.fine_tuning.jobs.create(model: "gpt-4o", training_file: "file-abc123") + job = openai.fine_tuning.jobs.create(model: :"babbage-002", training_file: "file-abc123") rescue OpenAI::Errors::APIError => e puts(e.status) # 400 end @@ -141,11 +148,8 @@ openai = OpenAI::Client.new( # Or, configure per-request: openai.chat.completions.create( - messages: [{ - role: "user", - content: "How can I get the name of the current day in JavaScript?" 
- }], - model: "gpt-4o", + messages: [{role: :user, content: "How can I get the name of the current day in JavaScript?"}], + model: :"gpt-4.1", request_options: {max_retries: 5} ) ``` @@ -166,78 +170,44 @@ openai = OpenAI::Client.new( # Or, configure per-request: openai.chat.completions.create( - messages: [{ - role: "user", - content: "How can I list all files in a directory using Python?" - }], - model: "gpt-4o", + messages: [{role: :user, content: "How can I list all files in a directory using Python?"}], + model: :"gpt-4.1", request_options: {timeout: 5} ) ``` -## LSP Support +## Editor support -### Solargraph +Some editor language services like [Solargraph](https://github.com/castwide/solargraph?tab=readme-ov-file#gem-support) or [Sorbet](https://sorbet.org/docs/rbi#the-hidden-definitions-rbi) require a manually triggered indexing step before functionalities like auto-completion and go to definition can operate. -This library includes [Solargraph](https://solargraph.org) support for both auto completion and go to definition. +Please refer to their respective documentation for details. This library also includes a [short guide](https://github.com/openai/openai-ruby/tree/main/CONTRIBUTING.md#editor-support) on how to set up various editor services for internal development. -```ruby -gem "solargraph", group: :development -``` +## Advanced Concepts -After Solargraph is installed, **you must populate its index** either via the provided editor command, or by running the following in your terminal: +### Model DSL -```sh -bundle exec solargraph gems -``` +This library uses a Model DSL to represent request parameters and response shapes in `lib/openai/models`. -Note: if you had installed the gem either using a `git:` or `github:` URL, or had vendored the gem using bundler, you will need to set up your [`.solargraph.yml`](https://solargraph.org/guides/configuration) to include the path to the gem's `lib` directory. +The model classes service as anchor points for both toolchain readable documentation, and language service assisted navigation links. This information also allows the SDK's internals to perform translation between plain and rich data types; e.g., conversion between a `Time` instance and an ISO8601 `String`, and vice versa. -```yaml -include: - - 'vendor/bundle/ruby/*/gems/openai-*/lib/**/*.rb' -``` - -Otherwise Solargraph will not be able to provide type information or auto-completion for any non-indexed libraries. - -### Sorbet - -This library is written with [Sorbet type definitions](https://sorbet.org/docs/rbi). However, there is no runtime dependency on the `sorbet-runtime`. - -What this means is that while you can use Sorbet to type check your code statically, and benefit from the [Sorbet Language Server](https://sorbet.org/docs/lsp) in your editor, there is no runtime type checking and execution overhead from Sorbet itself. - -Due to limitations with the Sorbet type system, where a method otherwise can take an instance of `OpenAI::BaseModel` class, you will need to use the `**` splat operator to pass the arguments: - -Please follow Sorbet's [setup guides](https://sorbet.org/docs/adopting) for best experience. +In all places where a `BaseModel` type is specified, vanilla Ruby `Hash` can also be used. 
For example, the following are interchangeable as arguments: ```ruby +# This has tooling readability, for auto-completion, static analysis, and goto definition with supported language services params = OpenAI::Models::Chat::CompletionCreateParams.new( - messages: [ - OpenAI::Models::ChatCompletionUserMessageParam.new( - role: "user", - content: "Say this is a test" - ) - ], - model: "gpt-4o" - ) + messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")], + model: :"gpt-4.1" +) -openai.chat.completions.create(**params) +# This also works +params = { + messages: [{role: :user, content: "Say this is a test"}], + model: :"gpt-4.1" +} ``` -Note: **This library emits an intentional warning under the [`tapioca` toolchain](https://github.com/Shopify/tapioca)**. This is normal, and does not impact functionality. - -### Ruby LSP - -The Ruby LSP has [best effort support](https://shopify.github.io/ruby-lsp/#guessed-types) for inferring type information from Ruby code, and as such it may not always be able to provide accurate type information. - -## Advanced - ### Making custom/undocumented requests -This library is typed for convenient access to the documented API. - -If you need to access undocumented endpoints, params, or response properties, the library can still be used. - #### Undocumented request params If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` under the `request_options:` parameter when making a requests as seen in examples above. @@ -248,15 +218,15 @@ To make requests to undocumented endpoints, you can make requests using `client. ```ruby response = client.request( - method: :post, - path: '/undocumented/endpoint', - query: {"dog": "woof"}, - headers: {"useful-header": "interesting-value"}, - body: {"he": "llo"}, - ) + method: :post, + path: '/undocumented/endpoint', + query: {"dog": "woof"}, + headers: {"useful-header": "interesting-value"}, + body: {"he": "llo"}, +) ``` -### Concurrency & Connection Pooling +### Concurrency & connection pooling The `OpenAI::Client` instances are thread-safe, and should be re-used across multiple threads. By default, each `Client` have their own HTTP connection pool, with a maximum number of connections equal to thread count. @@ -266,6 +236,33 @@ Unless otherwise specified, other classes in the SDK do not have locks protectin Currently, `OpenAI::Client` instances are only fork-safe if there are no in-flight HTTP requests. +### Sorbet + +#### Enums + +Sorbet's typed enums require sub-classing of the [`T::Enum` class](https://sorbet.org/docs/tenum) from the `sorbet-runtime` gem. + +Since this library does not depend on `sorbet-runtime`, it uses a [`T.all` intersection type](https://sorbet.org/docs/intersection-types) with a ruby primitive type to construct a "tagged alias" instead. + +```ruby +module OpenAI::Models::ChatModel + # This alias aids language service driven navigation. + TaggedSymbol = T.type_alias { T.all(Symbol, OpenAI::Models::ChatModel) } +end +``` + +#### Argument passing trick + +It is possible to pass a compatible model / parameter class to a method that expects keyword arguments by using the `**` splat operator. 
+ +```ruby +params = OpenAI::Models::Chat::CompletionCreateParams.new( + messages: [OpenAI::Models::Chat::ChatCompletionUserMessageParam.new(role: :user, content: "Say this is a test")], + model: :"gpt-4.1" +) +openai.chat.completions.create(**params) +``` + ## Versioning This package follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions. As the library is in initial development and has a major version of `0`, APIs may change at any time. @@ -275,3 +272,7 @@ This package considers improvements to the (non-runtime) `*.rbi` and `*.rbs` typ ## Requirements Ruby 3.1.0 or higher. + +## Contributing + +See [the contributing documentation](https://github.com/openai/openai-ruby/tree/main/CONTRIBUTING.md). diff --git a/Rakefile b/Rakefile index 7a8155db..dde46bf9 100644 --- a/Rakefile +++ b/Rakefile @@ -21,7 +21,7 @@ end desc("Preview docs; use `PORT=` to change the port") multitask(:"docs:preview") do - sh(*%w[yard server --bind [::] --reload --quiet --port], ENV.fetch("PORT", "8808")) + sh(*%w[yard server --reload --quiet --bind [::] --port], ENV.fetch("PORT", "8808")) end desc("Run test suites; use `TEST=path/to/test.rb` to run a specific test file") @@ -111,7 +111,7 @@ end desc("Typecheck everything") multitask(typecheck: [:"typecheck:steep", :"typecheck:sorbet"]) -desc("Lint everything") +desc("Lint and typecheck") multitask(lint: [:"lint:rubocop", :typecheck]) desc("Build yard docs") diff --git a/lib/openai.rb b/lib/openai.rb index 66af9d43..68a9c048 100644 --- a/lib/openai.rb +++ b/lib/openai.rb @@ -19,15 +19,6 @@ # We already ship the preferred sorbet manifests in the package itself. # `tapioca` currently does not offer us a way to opt out of unnecessary compilation. if Object.const_defined?(:Tapioca) && caller.chain([$PROGRAM_NAME]).chain(ARGV).grep(/tapioca/) - Warning.warn( - <<~WARN - \n - ⚠️ skipped loading of "openai" gem under `tapioca`. - - This message is normal and expected if you are running a `tapioca` command, and does not impact `.rbi` generation. 
- \n - WARN - ) return end From 924fcb42fa0991c491246df7667c72022190f1ee Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 18:08:55 +0000 Subject: [PATCH 12/15] feat: implement `#hash` for data containers --- .../transport/pooled_net_requester.rb | 6 +---- lib/openai/internal/type/array_of.rb | 3 +++ lib/openai/internal/type/base_model.rb | 24 +++++++++---------- lib/openai/internal/type/enum.rb | 3 +++ lib/openai/internal/type/hash_of.rb | 3 +++ lib/openai/internal/type/union.rb | 5 +++- rbi/lib/openai/internal/type/array_of.rbi | 3 +++ rbi/lib/openai/internal/type/base_model.rbi | 6 +++++ rbi/lib/openai/internal/type/enum.rbi | 3 +++ rbi/lib/openai/internal/type/hash_of.rbi | 3 +++ rbi/lib/openai/internal/type/union.rbi | 3 +++ sig/openai/internal/type/array_of.rbs | 2 ++ sig/openai/internal/type/base_model.rbs | 4 ++++ sig/openai/internal/type/enum.rbs | 2 ++ sig/openai/internal/type/hash_of.rbs | 2 ++ sig/openai/internal/type/union.rbs | 2 ++ test/openai/internal/type/base_model_test.rb | 5 +++- 17 files changed, 60 insertions(+), 19 deletions(-) diff --git a/lib/openai/internal/transport/pooled_net_requester.rb b/lib/openai/internal/transport/pooled_net_requester.rb index df4e1205..67e58347 100644 --- a/lib/openai/internal/transport/pooled_net_requester.rb +++ b/lib/openai/internal/transport/pooled_net_requester.rb @@ -57,15 +57,11 @@ def calibrate_socket_timeout(conn, deadline) # @return [Array(Net::HTTPGenericRequest, Proc)] def build_request(request, &blk) method, url, headers, body = request.fetch_values(:method, :url, :headers, :body) - - # ensure we construct a URI class of the right scheme - url = URI(url.to_s) - req = Net::HTTPGenericRequest.new( method.to_s.upcase, !body.nil?, method != :head, - url + URI(url.to_s) # ensure we construct a URI class of the right scheme ) headers.each { req[_1] = _2 } diff --git a/lib/openai/internal/type/array_of.rb b/lib/openai/internal/type/array_of.rb index 84109d94..d0d77538 100644 --- a/lib/openai/internal/type/array_of.rb +++ b/lib/openai/internal/type/array_of.rb @@ -44,6 +44,9 @@ def ==(other) # rubocop:enable Layout/LineLength end + # @return [Integer] + def hash = [self.class, item_type].hash + # @api private # # @param value [Array, Object] diff --git a/lib/openai/internal/type/base_model.rb b/lib/openai/internal/type/base_model.rb index e0849cca..043039d4 100644 --- a/lib/openai/internal/type/base_model.rb +++ b/lib/openai/internal/type/base_model.rb @@ -4,14 +4,6 @@ module OpenAI module Internal module Type # @abstract - # - # @example - # # `comparison_filter` is a `OpenAI::Models::ComparisonFilter` - # comparison_filter => { - # key: key, - # type: type, - # value: value - # } class BaseModel extend OpenAI::Internal::Type::Converter @@ -93,11 +85,13 @@ def fields state: state ) end - rescue StandardError + rescue StandardError => e cls = self.class.name.split("::").last - # rubocop:disable Layout/LineLength - message = "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}. To get the unparsed API response, use #{cls}[:#{__method__}]." 
- # rubocop:enable Layout/LineLength + message = [ + "Failed to parse #{cls}.#{__method__} from #{value.class} to #{target.inspect}.", + "To get the unparsed API response, use #{cls}[#{__method__.inspect}].", + "Cause: #{e.message}" + ].join(" ") raise OpenAI::Errors::ConversionError.new(message) end end @@ -171,6 +165,9 @@ def optional(name_sym, type_info, spec = {}) def ==(other) other.is_a?(Class) && other <= OpenAI::Internal::Type::BaseModel && other.fields == fields end + + # @return [Integer] + def hash = fields.hash end # @param other [Object] @@ -178,6 +175,9 @@ def ==(other) # @return [Boolean] def ==(other) = self.class == other.class && @data == other.to_h + # @return [Integer] + def hash = [self.class, @data].hash + class << self # @api private # diff --git a/lib/openai/internal/type/enum.rb b/lib/openai/internal/type/enum.rb index 8f14e58b..afdeb5f9 100644 --- a/lib/openai/internal/type/enum.rb +++ b/lib/openai/internal/type/enum.rb @@ -62,6 +62,9 @@ def ==(other) # rubocop:enable Style/CaseEquality end + # @return [Integer] + def hash = values.to_set.hash + # @api private # # Unlike with primitives, `Enum` additionally validates that the value is a member diff --git a/lib/openai/internal/type/hash_of.rb b/lib/openai/internal/type/hash_of.rb index 5a6d6304..40c7a89a 100644 --- a/lib/openai/internal/type/hash_of.rb +++ b/lib/openai/internal/type/hash_of.rb @@ -59,6 +59,9 @@ def ==(other) # rubocop:enable Layout/LineLength end + # @return [Integer] + def hash = [self.class, item_type].hash + # @api private # # @param value [Hash{Object=>Object}, Object] diff --git a/lib/openai/internal/type/union.rb b/lib/openai/internal/type/union.rb index 14a1a191..5f9048cf 100644 --- a/lib/openai/internal/type/union.rb +++ b/lib/openai/internal/type/union.rb @@ -43,7 +43,7 @@ module Union # # @return [Array] protected def derefed_variants - @known_variants.map { |key, variant_fn| [key, variant_fn.call] } + known_variants.map { |key, variant_fn| [key, variant_fn.call] } end # All of the specified variants for this union. 
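To make the intent of these `#hash` additions concrete, here is a small sketch (not part of the patch) using the `ComparisonFilter` fields shown earlier in this series: models that compare equal now also report equal hashes, which is the contract Ruby's `Hash` and `Set` collections build on.

```ruby
a = OpenAI::Models::ComparisonFilter.new(key: "region", type: :eq, value: "us")
b = OpenAI::Models::ComparisonFilter.new(key: "region", type: :eq, value: "us")

a == b           # => true
a.hash == b.hash # => true; previously the identity-based Object#hash would differ
```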
@@ -128,6 +128,9 @@ def ==(other) OpenAI::Internal::Type::Union === other && other.derefed_variants == derefed_variants end + # @return [Integer] + def hash = variants.hash + # @api private # # @param value [Object] diff --git a/rbi/lib/openai/internal/type/array_of.rbi b/rbi/lib/openai/internal/type/array_of.rbi index 8af5fd34..e7a84e1a 100644 --- a/rbi/lib/openai/internal/type/array_of.rbi +++ b/rbi/lib/openai/internal/type/array_of.rbi @@ -32,6 +32,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private sig do override diff --git a/rbi/lib/openai/internal/type/base_model.rbi b/rbi/lib/openai/internal/type/base_model.rbi index bc1959ae..cf8285ae 100644 --- a/rbi/lib/openai/internal/type/base_model.rbi +++ b/rbi/lib/openai/internal/type/base_model.rbi @@ -111,11 +111,17 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + + sig { returns(Integer) } + def hash; end end sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + class << self # @api private sig do diff --git a/rbi/lib/openai/internal/type/enum.rbi b/rbi/lib/openai/internal/type/enum.rbi index e1d0753c..8dcaa918 100644 --- a/rbi/lib/openai/internal/type/enum.rbi +++ b/rbi/lib/openai/internal/type/enum.rbi @@ -28,6 +28,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private # # Unlike with primitives, `Enum` additionally validates that the value is a member diff --git a/rbi/lib/openai/internal/type/hash_of.rbi b/rbi/lib/openai/internal/type/hash_of.rbi index 4edc379f..0faca34e 100644 --- a/rbi/lib/openai/internal/type/hash_of.rbi +++ b/rbi/lib/openai/internal/type/hash_of.rbi @@ -32,6 +32,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private sig do override diff --git a/rbi/lib/openai/internal/type/union.rbi b/rbi/lib/openai/internal/type/union.rbi index 2e47dfe1..13b7409c 100644 --- a/rbi/lib/openai/internal/type/union.rbi +++ b/rbi/lib/openai/internal/type/union.rbi @@ -47,6 +47,9 @@ module OpenAI sig { params(other: T.anything).returns(T::Boolean) } def ==(other); end + sig { returns(Integer) } + def hash; end + # @api private sig do override diff --git a/sig/openai/internal/type/array_of.rbs b/sig/openai/internal/type/array_of.rbs index 0489e6e2..80fcc2a2 100644 --- a/sig/openai/internal/type/array_of.rbs +++ b/sig/openai/internal/type/array_of.rbs @@ -15,6 +15,8 @@ module OpenAI def ==: (top other) -> bool + def hash: -> Integer + def coerce: ( ::Array[top] | top value, state: OpenAI::Internal::Type::Converter::coerce_state diff --git a/sig/openai/internal/type/base_model.rbs b/sig/openai/internal/type/base_model.rbs index e3a7d42c..177b22b8 100644 --- a/sig/openai/internal/type/base_model.rbs +++ b/sig/openai/internal/type/base_model.rbs @@ -49,8 +49,12 @@ module OpenAI def self.==: (top other) -> bool + def self.hash: -> Integer + def ==: (top other) -> bool + def hash: -> Integer + def self.coerce: ( OpenAI::Internal::Type::BaseModel | ::Hash[top, top] | top value, state: OpenAI::Internal::Type::Converter::coerce_state diff --git a/sig/openai/internal/type/enum.rbs b/sig/openai/internal/type/enum.rbs index 4de50b6d..897ae9eb 100644 --- a/sig/openai/internal/type/enum.rbs +++ b/sig/openai/internal/type/enum.rbs @@ -10,6 +10,8 @@ module 
OpenAI def ==: (top other) -> bool + def hash: -> Integer + def coerce: ( String | Symbol | top value, state: OpenAI::Internal::Type::Converter::coerce_state diff --git a/sig/openai/internal/type/hash_of.rbs b/sig/openai/internal/type/hash_of.rbs index e23bc0c3..26f65397 100644 --- a/sig/openai/internal/type/hash_of.rbs +++ b/sig/openai/internal/type/hash_of.rbs @@ -15,6 +15,8 @@ module OpenAI def ==: (top other) -> bool + def hash: -> Integer + def coerce: ( ::Hash[top, top] | top value, state: OpenAI::Internal::Type::Converter::coerce_state diff --git a/sig/openai/internal/type/union.rbs b/sig/openai/internal/type/union.rbs index 8b58f419..86c308a6 100644 --- a/sig/openai/internal/type/union.rbs +++ b/sig/openai/internal/type/union.rbs @@ -30,6 +30,8 @@ module OpenAI def ==: (top other) -> bool + def hash: -> Integer + def coerce: ( top value, state: OpenAI::Internal::Type::Converter::coerce_state diff --git a/test/openai/internal/type/base_model_test.rb b/test/openai/internal/type/base_model_test.rb index b03987cf..a3267818 100644 --- a/test/openai/internal/type/base_model_test.rb +++ b/test/openai/internal/type/base_model_test.rb @@ -647,14 +647,17 @@ def test_equality [U0.new(String, Float), U0.new(String, Integer)] => false, [U1, U2] => true, [M1, M2] => false, - [M1, M3] => true + [M1, M3] => true, + [M1.new(a: 1), M1.new(a: 1)] => true } cases.each do if _2 assert_equal(*_1) + assert_equal(*_1.map(&:hash)) else refute_equal(*_1) + refute_equal(*_1.map(&:hash)) end end end From c745aef51359e1df2118006dd69470fc4714de5a Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 19:04:49 +0000 Subject: [PATCH 13/15] chore: update README with recommended editor plugins --- CONTRIBUTING.md | 21 +++++++-------------- README.md | 20 +++++++++----------- 2 files changed, 16 insertions(+), 25 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d800af1c..6f93458e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -96,20 +96,17 @@ $ bundle exec rake format ## Editor Support -### Solargraph +### Ruby LSP -This library includes [Solargraph](https://solargraph.org) support for both auto-completion and go to definition. +[Ruby LSP](https://github.com/Shopify/ruby-lsp) has quite good support for go to definition, but not auto-completion. -```ruby -gem "solargraph", group: :development -``` +This can be installed along side Solargraph. + +### Solargraph -Note: if you had installed the gem locally using `git: "..."` or `path: "..."`, you must update your [`.solargraph.yml`](https://solargraph.org/guides/configuration) to include the path to where the gem is located: +[Solargraph](https://solargraph.org) has quite good support for auto-completion, but not go to definition. -```yaml -include: - - '/lib/**/*.rb' -``` +This can be installed along side Ruby LSP. ### Sorbet @@ -119,10 +116,6 @@ include: 2. For each generic type in `*.rbi` files, a spurious "Duplicate type member" error is present. -### Ruby LSP - -The Ruby LSP has [best effort support](https://shopify.github.io/ruby-lsp/#guessed-types) for inferring type information from Ruby code, and as such it may not always be able to provide accurate type information. 
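Since the revised guidance pairs Ruby LSP (go to definition) with Solargraph (auto-completion), a contributor's development group might look like the following — a sketch, not something this patch adds:

```ruby
# Gemfile
group :development do
  gem "ruby-lsp", require: false
  gem "solargraph", require: false
end
```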
- ## Documentation Preview To preview the documentation, run: diff --git a/README.md b/README.md index d5936411..8d369bc2 100644 --- a/README.md +++ b/README.md @@ -176,19 +176,11 @@ openai.chat.completions.create( ) ``` -## Editor support - -Some editor language services like [Solargraph](https://github.com/castwide/solargraph?tab=readme-ov-file#gem-support) or [Sorbet](https://sorbet.org/docs/rbi#the-hidden-definitions-rbi) require a manually triggered indexing step before functionalities like auto-completion and go to definition can operate. - -Please refer to their respective documentation for details. This library also includes a [short guide](https://github.com/openai/openai-ruby/tree/main/CONTRIBUTING.md#editor-support) on how to set up various editor services for internal development. - -## Advanced Concepts - -### Model DSL +## Model DSL -This library uses a Model DSL to represent request parameters and response shapes in `lib/openai/models`. +This library uses a simple DSL to represent request parameters and response shapes in `lib/openai/models`. -The model classes service as anchor points for both toolchain readable documentation, and language service assisted navigation links. This information also allows the SDK's internals to perform translation between plain and rich data types; e.g., conversion between a `Time` instance and an ISO8601 `String`, and vice versa. +With the right [editor plugins](https://shopify.github.io/ruby-lsp), you can ctrl-click on elements of the DSL to navigate around and explore the library. In all places where a `BaseModel` type is specified, vanilla Ruby `Hash` can also be used. For example, the following are interchangeable as arguments: @@ -206,6 +198,12 @@ params = { } ``` +## Editor support + +A combination of [Shopify LSP](https://shopify.github.io/ruby-lsp) and [Solargraph](https://solargraph.org/) is recommended for non-[Sorbet](https://sorbet.org) users. The former is especially good at go to definition, while the latter has much better auto-completion support. + +## Advanced concepts + ### Making custom/undocumented requests #### Undocumented request params From 8a79cc0a6396f0989feaefb2d4d30b6c1dc75dfe Mon Sep 17 00:00:00 2001 From: dogisgreat Date: Fri, 18 Apr 2025 16:19:42 -0400 Subject: [PATCH 14/15] chore: update readme with temporary install instructions --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 8d369bc2..33a1ebe9 100644 --- a/README.md +++ b/README.md @@ -10,12 +10,14 @@ The REST API documentation can be found on [platform.openai.com](https://platfor ## Installation +ℹ️ The `openai` gem is not yet available on [rubygems.org](https://rubygems.org). 
+ To use this gem, install via Bundler by adding the following to your application's `Gemfile`: ```ruby -gem "openai", "~> 0.1.0.pre.alpha.4" +gem "openai", github: "openai/openai-ruby", branch: "main" ``` From a36c585e65fc4d795ba22292fd3f474fea1f3b14 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Fri, 18 Apr 2025 20:20:36 +0000 Subject: [PATCH 15/15] release: 0.1.0-alpha.5 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 27 +++++++++++++++++++++++++++ Gemfile.lock | 2 +- lib/openai/version.rb | 2 +- 4 files changed, 30 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b56c3d0b..e8285b71 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.0-alpha.4" + ".": "0.1.0-alpha.5" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index e967daf8..c505717e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,32 @@ # Changelog +## 0.1.0-alpha.5 (2025-04-18) + +Full Changelog: [v0.1.0-alpha.4...v0.1.0-alpha.5](https://github.com/openai/openai-ruby/compare/v0.1.0-alpha.4...v0.1.0-alpha.5) + +### Features + +* implement `#hash` for data containers ([924fcb4](https://github.com/openai/openai-ruby/commit/924fcb42fa0991c491246df7667c72022190f1ee)) + + +### Bug Fixes + +* always send idempotency header when specified as a request option ([548bfaf](https://github.com/openai/openai-ruby/commit/548bfaf81a4947860ec35ff7efafb144da4863bb)) +* **client:** send correct HTTP path ([896142a](https://github.com/openai/openai-ruby/commit/896142abf1bb03f1eb48e0754cbff04edd081a0e)) + + +### Chores + +* documentation improvements ([ff43d73](https://github.com/openai/openai-ruby/commit/ff43d73fe6e514d5b9f110892c3880998df8dacf)) +* **internal:** configure releases ([7eb9185](https://github.com/openai/openai-ruby/commit/7eb91852c03eaba177464060631c2645d3db63d0)) +* **internal:** contribute.md and contributor QoL improvements ([d060adf](https://github.com/openai/openai-ruby/commit/d060adf81aadb6b138428bdbde79633fd0dff230)) +* make sorbet enums easier to read ([7c03213](https://github.com/openai/openai-ruby/commit/7c0321329658a6d2823f9022a77be5965186b94c)) +* refine `#inspect` and `#to_s` for model classes ([84308a6](https://github.com/openai/openai-ruby/commit/84308a6683e6ed9d520b05e0ac828de662fe0198)) +* simplify yard annotations by removing most `@!parse` directives ([16ec2e3](https://github.com/openai/openai-ruby/commit/16ec2e391b0cfdf49727099be9afa220c7ab16e5)) +* update README with recommended editor plugins ([c745aef](https://github.com/openai/openai-ruby/commit/c745aef51359e1df2118006dd69470fc4714de5a)) +* update readme with temporary install instructions ([8a79cc0](https://github.com/openai/openai-ruby/commit/8a79cc0a6396f0989feaefb2d4d30b6c1dc75dfe)) +* use `@!method` instead of `@!parse` for virtual method type definitions ([b5fba2e](https://github.com/openai/openai-ruby/commit/b5fba2e689884dc011dec9fd2d8349c9c842d274)) + ## 0.1.0-alpha.4 (2025-04-16) Full Changelog: [v0.1.0-alpha.3...v0.1.0-alpha.4](https://github.com/openai/openai-ruby/compare/v0.1.0-alpha.3...v0.1.0-alpha.4) diff --git a/Gemfile.lock b/Gemfile.lock index d5303e6f..0891755f 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -11,7 +11,7 @@ GIT PATH remote: . 
specs: - openai (0.1.0.pre.alpha.4) + openai (0.1.0.pre.alpha.5) connection_pool GEM diff --git a/lib/openai/version.rb b/lib/openai/version.rb index 3790ca0e..c92a3443 100644 --- a/lib/openai/version.rb +++ b/lib/openai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OpenAI - VERSION = "0.1.0.pre.alpha.4" + VERSION = "0.1.0.pre.alpha.5" end
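With the manifest, changelog, lockfile, and `OpenAI::VERSION` all bumped in this release commit, a consumer tracking the `main` branch (per the temporary install instructions above) can sanity-check the upgrade with a one-liner — a trivial sketch:

```ruby
require "openai"
puts(OpenAI::VERSION) # => "0.1.0.pre.alpha.5"
```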